author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:17:27 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:17:27 +0000
commit     f215e02bf85f68d3a6106c2a1f4f7f063f819064 (patch)
tree       6bb5b92c046312c4e95ac2620b10ddf482d3fa8b /src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python
parent     Initial commit. (diff)
Adding upstream version 7.0.14-dfsg. (tag: upstream/7.0.14-dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python')
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AmlToC/AmlToC.py  142
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py  113
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py  329
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py  650
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py  169
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py  2111
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py  464
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py  1810
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py  1615
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py  366
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py  132
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py  304
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py  119
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py  2456
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py  674
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py  1603
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py  630
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py  683
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py  280
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py  971
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py  11
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/BPDG.py  158
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/GenVpd.py  689
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/StringTable.py  72
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateCapsule.py  1051
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateWindowsDriver.py  120
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py  64
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildToolError.py  160
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildVersion.py  10
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/DataType.py  539
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/FmpPayloadHeader.py  85
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/__init__.py  9
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/EdkLogger.py  421
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Expression.py  1054
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/GlobalData.py  124
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOs.py  79
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOsPath.py  47
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathSupport.py  45
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Misc.py  1929
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/MultipleWorkspace.py  150
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Parsing.py  906
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/RangeExpression.py  694
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/StringUtils.py  873
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/TargetTxtClassObject.py  199
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/ToolDefClassObject.py  290
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/CapsuleDependency.py  409
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpAuthHeader.py  190
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpCapsuleHeader.py  310
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py  130
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/__init__.py  9
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VariableAttributes.py  51
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VpdInfoFile.py  255
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/caching.py  41
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/CommonClass.py  91
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/DataClass.py  369
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/Exceptions.py  23
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/FdfClass.py  312
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/C.g  673
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CLexer.py  4941
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CParser.py  18833
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/__init__.py  0
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/C.g4  631
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CLexer.py  626
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CListener.py  809
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CParser.py  6273
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/__init__.py  0
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Check.py  1535
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragment.py  159
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py  595
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Configuration.py  444
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Database.py  340
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccGlobalData.py  21
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccMain.py  415
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccToolError.py  205
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Exception.py  83
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/FileProfile.py  51
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaDataParser.py  271
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py  213
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py  2089
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py  329
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/ParserWarning.py  24
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py  225
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/__init__.py  14
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/c.py  2654
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/config.ini  281
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/exception.xml  951
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CLexer.py  4941
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CParser.py  18833
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/__init__.py  0
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CLexer.py  627
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CListener.py  808
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CParser.py  6273
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/__init__.py  0
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragment.py  179
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragmentCollector.py  435
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Database.py  249
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotGlobalData.py  105
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotMain.py  1713
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotToolError.py  15
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/FileProfile.py  54
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Identification.py  52
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/InfParserLite.py  148
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Parser.py  869
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/ParserWarning.py  20
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Report.py  468
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/c.py  383
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GNUmakefile  12
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/AprioriSection.py  121
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Capsule.py  250
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CapsuleData.py  239
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CompressSection.py  96
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DataSection.py  117
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DepexSection.py  111
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/EfiSection.py  318
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fd.py  155
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FdfParser.py  4526
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Ffs.py  49
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsFileStatement.py  175
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsInfStatement.py  1128
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fv.py  431
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FvImageSection.py  158
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFds.py  800
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py  1033
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GuidSection.py  278
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomFileStatement.py  48
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py  159
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptionRom.py  131
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Region.py  348
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Rule.py  23
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleComplexFile.py  25
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleSimpleFile.py  25
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Section.py  153
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/UiSection.py  74
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/VerSection.py  76
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py  228
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/__init__.py  9
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Makefile  13
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py  280
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py  280
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Readme.md  158
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pem  60
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pub.pem  25
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer  bin  0 -> 1008 bytes
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer.inc  1
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc  1
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pem  58
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pub.pem  23
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pem  59
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pub.pem  23
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/README.md  29
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py  170
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py  235
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPrivateKey.pem  27
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.bin  1
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt  1
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/Split.py  210
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/__init__.py  10
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/Table.py  114
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDataModel.py  90
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDec.py  103
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDsc.py  103
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableEotReport.py  71
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFdf.py  104
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFile.py  99
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFunction.py  90
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableIdentifier.py  85
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableInf.py  109
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TablePcd.py  85
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableQuery.py  63
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableReport.py  127
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/TargetTool.py  254
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Trim/Trim.py  627
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/BuildVersion.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DependencyRules.py  448
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py  267
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/FileHook.py  193
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/IpiDb.py  922
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/PackageFile.py  250
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py  683
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py  1122
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py  188
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py  12
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InstallPkg.py  967
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InventoryWs.py  111
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentGenerating.py  238
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentParsing.py  593
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/DataType.py  949
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py  567
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/GlobalData.py  110
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Misc.py  989
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ParserValidate.py  727
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Parsing.py  1015
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/StringUtils.py  982
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/UniClassObject.py  1074
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py  223
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/__init__.py  14
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/Log.py  319
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/StringTable.py  860
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/ToolError.py  171
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/MkPkg.py  274
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py  953
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py  654
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py  192
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py  605
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py  686
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py  87
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py  156
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py  83
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py  1002
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py  160
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py  347
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py  113
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py  247
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py  142
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py  181
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py  669
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py  337
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py  305
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py  233
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py  127
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/__init__.py  14
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParser.py  1091
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py  364
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py  283
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py  226
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py  212
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py  191
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py  98
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py  368
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py  197
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py  134
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParser.py  680
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py  216
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py  178
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py  493
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py  139
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py  1000
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py  1071
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py  255
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/ReplacePkg.py  142
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/RmPkg.py  270
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/TestInstall.py  94
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UPT.py  347
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py  1414
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py  917
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py  279
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py  528
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py  381
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/CommonXml.py  997
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py  278
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/IniToXml.py  496
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py  1003
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py  402
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PcdXml.py  555
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParser.py  926
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py  95
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/__init__.py  14
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/BuildClassObject.py  646
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DecBuildData.py  475
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DscBuildData.py  3588
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/InfBuildData.py  1064
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaDataTable.py  306
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py  45
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileParser.py  2193
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileTable.py  430
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceCommon.py  256
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py  204
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/__init__.py  9
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/basetool_tiano_python_path_env.yaml  11
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/BuildReport.py  2316
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/__init__.py  9
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/build.py  2796
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/buildoptions.py  105
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/sitecustomize.py  15
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/tests/Split/test_split.py  115
295 files changed, 175243 insertions, 0 deletions
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AmlToC/AmlToC.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AmlToC/AmlToC.py
new file mode 100755
index 00000000..25477426
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AmlToC/AmlToC.py
@@ -0,0 +1,142 @@
+## @file
+#
+# Convert an AML file to a .c file containing the AML bytecode stored in a
+# C array.
+# By default, "Tables\Dsdt.aml" will generate "Tables\Dsdt.c".
+# "Tables\Dsdt.c" will contain a C array named "dsdt_aml_code" that contains
+# the AML bytecode.
+#
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import argparse
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+import sys
+import os
+
+__description__ = """
+Convert an AML file to a .c file containing the AML bytecode stored in a C
+array. By default, Tables\Dsdt.aml will generate Tables\Dsdt.c.
+Tables\Dsdt.c will contain a C array named "dsdt_aml_code" that contains
+the AML bytecode.
+"""
+
+## Parse the command line arguments.
+#
+# @retval An argparse.Namespace instance containing the parsed values.
+#
+def ParseArgs():
+ # Initialize the parser.
+ Parser = argparse.ArgumentParser(description=__description__)
+
+ # Define the possible arguments.
+ Parser.add_argument(dest="InputFile",
+ help="Path to an input AML file to generate a .c file from.")
+ Parser.add_argument("-o", "--out-dir", dest="OutDir",
+ help="Output directory where the .c file will be generated. Default is the input file's directory.")
+
+ # Parse the input arguments.
+ Args = Parser.parse_args()
+ SplitInputName = ""
+
+ if not os.path.exists(Args.InputFile):
+ EdkLogger.error(__file__, FILE_OPEN_FAILURE,
+ ExtraData=Args.InputFile)
+ return None
+ else:
+ with open(Args.InputFile, "rb") as fIn:
+ Signature = str(fIn.read(4))
+ if ("DSDT" not in Signature) and ("SSDT" not in Signature):
+ EdkLogger.info("Invalid file type. File does not have a valid DSDT or SSDT signature: {}".format(Args.InputFile))
+ return None
+
+ # Get the basename of the input file.
+ SplitInputName = os.path.splitext(Args.InputFile)
+ BaseName = os.path.basename(SplitInputName[0])
+
+ # If no output directory is specified, output to the input directory.
+ if not Args.OutDir:
+ Args.OutputFile = os.path.join(os.path.dirname(Args.InputFile),
+ BaseName + ".c")
+ else:
+ if not os.path.exists(Args.OutDir):
+ os.mkdir(Args.OutDir)
+ Args.OutputFile = os.path.join(Args.OutDir, BaseName + ".c")
+
+ Args.BaseName = BaseName
+
+ return Args
+
+## Convert an AML file to a .c file containing the AML bytecode stored
+# in a C array.
+#
+# @param InputFile Path to the input AML file.
+# @param OutputFile Path to the output .c file to generate.
+# @param BaseName Base name of the input file.
+# This is also the name of the generated .c file.
+#
+def AmlToC(InputFile, OutputFile, BaseName):
+
+ ArrayName = BaseName.lower() + "_aml_code"
+ FileHeader =\
+"""
+// This file has been generated from:
+// -Python script: {}
+// -Input AML file: {}
+
+"""
+
+ with open(InputFile, "rb") as fIn, open(OutputFile, "w") as fOut:
+        # Write the header. The script path fills the first placeholder and
+        # the input AML file the second, matching the header template above.
+        fOut.write(FileHeader.format(os.path.abspath(__file__), os.path.abspath(InputFile)))
+
+ # Write the array and its content.
+ fOut.write("unsigned char {}[] = {{\n ".format(ArrayName))
+ cnt = 0
+ byte = fIn.read(1)
+ while len(byte) != 0:
+ fOut.write("0x{0:02X}, ".format(ord(byte)))
+ cnt += 1
+ if (cnt % 8) == 0:
+ fOut.write("\n ")
+ byte = fIn.read(1)
+ fOut.write("\n};\n")
+
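+# For illustration only (comment added by the editor, not emitted by the
+# tool): for an input Dsdt.aml, the generated Dsdt.c is expected to have the
+# shape
+#
+#   unsigned char dsdt_aml_code[] = {
+#     0x44, 0x53, 0x44, 0x54, ...   // 'DSDT' signature, then the AML bytes
+#   };
+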
+## Main method
+#
+# This method:
+# 1- Initializes an EdkLogger instance.
+# 2- Parses the input arguments.
+# 3- Converts an AML file to a .c file containing the AML bytecode stored
+# in a C array.
+#
+# @retval 0 Success.
+# @retval 1 Error.
+#
+def Main():
+ # Initialize an EdkLogger instance.
+ EdkLogger.Initialize()
+
+ try:
+ # Parse the input arguments.
+ CommandArguments = ParseArgs()
+ if not CommandArguments:
+ return 1
+
+ # Convert an AML file to a .c file containing the AML bytecode stored
+ # in a C array.
+ AmlToC(CommandArguments.InputFile, CommandArguments.OutputFile, CommandArguments.BaseName)
+ except Exception as e:
+ print(e)
+ return 1
+
+ return 0
+
+if __name__ == '__main__':
+ r = Main()
+ # 0-127 is a safe return range, and 1 is a standard default error
+ if r < 0 or r > 127: r = 1
+ sys.exit(r)
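+
+# Example invocation (illustrative; paths are hypothetical):
+#   python AmlToC.py Tables/Dsdt.aml -o Tables
+# This produces Tables/Dsdt.c containing the "dsdt_aml_code" array.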
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py
new file mode 100755
index 00000000..8e4c54bb
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -0,0 +1,113 @@
+## @file
+# Generate AutoGen.h, AutoGen.c and *.depex files
+#
+# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>
+# Copyright (c) 2019, American Megatrends, Inc. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from Common.DataType import TAB_STAR
+## Base class for AutoGen
+#
+# This class just implements the cache mechanism of AutoGen objects.
+#
+class AutoGen(object):
+ # database to maintain the objects in each child class
+ __ObjectCache = {} # (BuildTarget, ToolChain, ARCH, platform file): AutoGen object
+
+ ## Factory method
+ #
+ # @param Class class object of real AutoGen class
+ # (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
+ # @param Workspace Workspace directory or WorkspaceAutoGen object
+ # @param MetaFile The path of meta file
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param Arch Target arch
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+
+ def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ # check if the object has been created
+ Key = (Target, Toolchain, Arch, MetaFile)
+ if Key in cls.__ObjectCache:
+ # if it exists, just return it directly
+ return cls.__ObjectCache[Key]
+        # it didn't exist; create it, cache it, and return it
+ RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
+ return RetVal
+
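+    # Illustrative use of the cache (hypothetical arguments): constructing a
+    # subclass twice with the same key returns the same object, e.g.
+    #   a = PlatformAutoGen(Ws, MetaFile, "DEBUG", "GCC5", "X64")
+    #   b = PlatformAutoGen(Ws, MetaFile, "DEBUG", "GCC5", "X64")
+    #   assert a is b   # cached under the (Target, Toolchain, Arch, MetaFile) key
+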
+
+ ## hash() operator
+ #
+    # The file path of the platform file is used as the hash value of this object
+ #
+ # @retval int Hash value of the file path of platform file
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+ ## str() operator
+ #
+    # The file path of the platform file is used to represent this object
+ #
+ # @retval string String of platform file path
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## "==" operator
+ def __eq__(self, Other):
+ return Other and self.MetaFile == Other
+
+ @classmethod
+ def Cache(cls):
+ return cls.__ObjectCache
+
+#
+# The priority list used when overriding build options
+#
+PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
+ "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE
+ "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE
+ "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
+ "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
+## Calculate the priority value of the build option
+#
+# @param Key Build option definition in the form: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+#
+# @retval Value Priority value based on the priority list.
+#
+def CalculatePriorityValue(Key):
+ Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
+ PriorityValue = 0x11111
+ if Target == TAB_STAR:
+ PriorityValue &= 0x01111
+ if ToolChain == TAB_STAR:
+ PriorityValue &= 0x10111
+ if Arch == TAB_STAR:
+ PriorityValue &= 0x11011
+ if CommandType == TAB_STAR:
+ PriorityValue &= 0x11101
+ if Attr == TAB_STAR:
+ PriorityValue &= 0x11110
+
+ return PrioList["0x%0.5x" % PriorityValue]
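+
+# Worked example (illustrative): for Key = "*_GCC5_X64_CC_FLAGS", Target is
+# TAB_STAR, so PriorityValue becomes 0x11111 & 0x01111 = 0x01111; the other
+# four fields are concrete, giving PrioList["0x01111"], i.e. priority 15.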
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
new file mode 100755
index 00000000..d392ffb2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
@@ -0,0 +1,329 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+import multiprocessing as mp
+import threading
+from Common.Misc import PathClass
+from AutoGen.ModuleAutoGen import ModuleAutoGen
+from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
+import Common.GlobalData as GlobalData
+import Common.EdkLogger as EdkLogger
+import os
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from AutoGen.AutoGen import AutoGen
+from Workspace.WorkspaceDatabase import BuildDB
+try:
+ from queue import Empty
+except ImportError:
+ from Queue import Empty
+import traceback
+import sys
+from AutoGen.DataPipe import MemoryDataPipe
+import logging
+import time
+
+def clearQ(q):
+ try:
+ while True:
+ q.get_nowait()
+ except Empty:
+ pass
+
+class LogAgent(threading.Thread):
+ def __init__(self,log_q,log_level,log_file=None):
+ super(LogAgent,self).__init__()
+ self.log_q = log_q
+ self.log_level = log_level
+ self.log_file = log_file
+ def InitLogger(self):
+ # For DEBUG level (All DEBUG_0~9 are applicable)
+ self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
+ _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
+ self._DebugLogger_agent.setLevel(self.log_level)
+ _DebugChannel = logging.StreamHandler(sys.stdout)
+ _DebugChannel.setFormatter(_DebugFormatter)
+ self._DebugLogger_agent.addHandler(_DebugChannel)
+
+ # For VERBOSE, INFO, WARN level
+ self._InfoLogger_agent = logging.getLogger("tool_info_agent")
+ _InfoFormatter = logging.Formatter("%(message)s")
+ self._InfoLogger_agent.setLevel(self.log_level)
+ _InfoChannel = logging.StreamHandler(sys.stdout)
+ _InfoChannel.setFormatter(_InfoFormatter)
+ self._InfoLogger_agent.addHandler(_InfoChannel)
+
+ # For ERROR level
+ self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
+ _ErrorFormatter = logging.Formatter("%(message)s")
+ self._ErrorLogger_agent.setLevel(self.log_level)
+ _ErrorCh = logging.StreamHandler(sys.stderr)
+ _ErrorCh.setFormatter(_ErrorFormatter)
+ self._ErrorLogger_agent.addHandler(_ErrorCh)
+
+ if self.log_file:
+ if os.path.exists(self.log_file):
+ os.remove(self.log_file)
+ _Ch = logging.FileHandler(self.log_file)
+ _Ch.setFormatter(_DebugFormatter)
+ self._DebugLogger_agent.addHandler(_Ch)
+
+ _Ch= logging.FileHandler(self.log_file)
+ _Ch.setFormatter(_InfoFormatter)
+ self._InfoLogger_agent.addHandler(_Ch)
+
+ _Ch = logging.FileHandler(self.log_file)
+ _Ch.setFormatter(_ErrorFormatter)
+ self._ErrorLogger_agent.addHandler(_Ch)
+
+ def run(self):
+ self.InitLogger()
+ while True:
+ log_message = self.log_q.get()
+ if log_message is None:
+ break
+ if log_message.name == "tool_error":
+ self._ErrorLogger_agent.log(log_message.levelno,log_message.getMessage())
+ elif log_message.name == "tool_info":
+ self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
+ elif log_message.name == "tool_debug":
+ self._DebugLogger_agent.log(log_message.levelno,log_message.getMessage())
+ else:
+ self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
+
+ def kill(self):
+ self.log_q.put(None)
+class AutoGenManager(threading.Thread):
+ def __init__(self,autogen_workers, feedback_q,error_event):
+ super(AutoGenManager,self).__init__()
+ self.autogen_workers = autogen_workers
+ self.feedback_q = feedback_q
+ self.Status = True
+ self.error_event = error_event
+ def run(self):
+ try:
+ fin_num = 0
+ while True:
+ badnews = self.feedback_q.get()
+ if badnews is None:
+ break
+ if badnews == "Done":
+ fin_num += 1
+ elif badnews == "QueueEmpty":
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
+ self.TerminateWorkers()
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
+ self.Status = False
+ self.TerminateWorkers()
+ if fin_num == len(self.autogen_workers):
+ self.clearQueue()
+ for w in self.autogen_workers:
+ w.join()
+ break
+ except Exception:
+ return
+
+ def clearQueue(self):
+ taskq = self.autogen_workers[0].module_queue
+ logq = self.autogen_workers[0].log_q
+ clearQ(taskq)
+ clearQ(self.feedback_q)
+ clearQ(logq)
+        # Copy the cache queue items to the parent thread before clearing
+ cacheq = self.autogen_workers[0].cache_q
+ try:
+ cache_num = 0
+ while True:
+ item = cacheq.get()
+ if item == "CacheDone":
+ cache_num += 1
+ else:
+ GlobalData.gModuleAllCacheStatus.add(item)
+ if cache_num == len(self.autogen_workers):
+ break
+ except:
+ print ("cache_q error")
+
+ def TerminateWorkers(self):
+ self.error_event.set()
+ def kill(self):
+ self.feedback_q.put(None)
+class AutoGenWorkerInProcess(mp.Process):
+ def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
+ mp.Process.__init__(self)
+ self.module_queue = module_queue
+ self.data_pipe_file_path =data_pipe_file_path
+ self.data_pipe = None
+ self.feedback_q = feedback_q
+ self.PlatformMetaFileSet = {}
+ self.file_lock = file_lock
+ self.cache_q = cache_q
+ self.log_q = log_q
+ self.error_event = error_event
+ def GetPlatformMetaFile(self,filepath,root):
+ try:
+ return self.PlatformMetaFileSet[(filepath,root)]
+ except:
+ self.PlatformMetaFileSet[(filepath,root)] = filepath
+ return self.PlatformMetaFileSet[(filepath,root)]
+ def run(self):
+ try:
+ taskname = "Init"
+ with self.file_lock:
+ try:
+ self.data_pipe = MemoryDataPipe()
+ self.data_pipe.load(self.data_pipe_file_path)
+ except:
+ self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
+ EdkLogger.LogClientInitialize(self.log_q)
+ loglevel = self.data_pipe.Get("LogLevel")
+ if not loglevel:
+ loglevel = EdkLogger.INFO
+ EdkLogger.SetLevel(loglevel)
+ target = self.data_pipe.Get("P_Info").get("Target")
+ toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
+ archlist = self.data_pipe.Get("P_Info").get("ArchList")
+
+ active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
+ workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(workspacedir, PackagesPath)
+ self.Wa = WorkSpaceInfo(
+ workspacedir,active_p,target,toolchain,archlist
+ )
+ self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
+ GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
+ GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
+ os.environ._data = self.data_pipe.Get("Env_Var")
+ GlobalData.gWorkspace = workspacedir
+ GlobalData.gDisableIncludePathCheck = False
+ GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
+ GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
+
+ GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
+ GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
+ GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
+ GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
+ GlobalData.gModulePreMakeCacheStatus = dict()
+ GlobalData.gModuleMakeCacheStatus = dict()
+ GlobalData.gHashChainStatus = dict()
+ GlobalData.gCMakeHashFile = dict()
+ GlobalData.gModuleHashFile = dict()
+ GlobalData.gFileHashDict = dict()
+ GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
+ GlobalData.file_lock = self.file_lock
+ CommandTarget = self.data_pipe.Get("CommandTarget")
+ pcd_from_build_option = []
+ for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
+ pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
+ if pcd_tuple[2].strip():
+ pcd_id = ".".join((pcd_id,pcd_tuple[2]))
+ pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
+ GlobalData.BuildOptionPcd = pcd_from_build_option
+ module_count = 0
+ FfsCmd = self.data_pipe.Get("FfsCommand")
+ if FfsCmd is None:
+ FfsCmd = {}
+ GlobalData.FfsCmd = FfsCmd
+ PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
+ self.data_pipe.Get("P_Info").get("WorkspaceDir"))
+ while True:
+ if self.error_event.is_set():
+ break
+ module_count += 1
+ try:
+ module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
+ except Empty:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
+ time.sleep(0.01)
+ continue
+ if module_file is None:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
+ self.feedback_q.put("QueueEmpty")
+ time.sleep(0.01)
+ continue
+
+ modulefullpath = os.path.join(module_root,module_file)
+ taskname = " : ".join((modulefullpath,module_arch))
+ module_metafile = PathClass(module_file,module_root)
+ if module_path:
+ module_metafile.Path = module_path
+ if module_basename:
+ module_metafile.BaseName = module_basename
+ if module_originalpath:
+ module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
+ arch = module_arch
+ target = self.data_pipe.Get("P_Info").get("Target")
+ toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
+ Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
+ Ma.IsLibrary = IsLib
+                # The calling sequence of SourceFileList impacts the makefile string sequence.
+                # Create the cached SourceFileList here to unify its calling sequence for both
+ # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
+ RetVal = Ma.SourceFileList
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
+ try:
+ CacheResult = Ma.CanSkipbyPreMakeCache()
+ except:
+ CacheResult = False
+ self.feedback_q.put(taskname)
+
+ if CacheResult:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
+ continue
+ else:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))
+
+ Ma.CreateCodeFile(False)
+ Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))
+ Ma.CreateAsBuiltInf()
+ if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
+ try:
+ CacheResult = Ma.CanSkipbyMakeCache()
+ except:
+ CacheResult = False
+ self.feedback_q.put(taskname)
+
+ if CacheResult:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
+ continue
+ else:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))
+
+ except Exception as e:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
+ self.feedback_q.put(taskname)
+ finally:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
+ self.feedback_q.put("Done")
+ self.cache_q.put("CacheDone")
+
+ def printStatus(self):
+ print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
+ print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
+ groupobj = {}
+ for buildobj in BuildDB.BuildObject.GetCache().values():
+ if str(buildobj).lower().endswith("dec"):
+ try:
+ groupobj['dec'].append(str(buildobj))
+ except:
+ groupobj['dec'] = [str(buildobj)]
+ if str(buildobj).lower().endswith("dsc"):
+ try:
+ groupobj['dsc'].append(str(buildobj))
+ except:
+ groupobj['dsc'] = [str(buildobj)]
+
+ if str(buildobj).lower().endswith("inf"):
+ try:
+ groupobj['inf'].append(str(buildobj))
+ except:
+ groupobj['inf'] = [str(buildobj)]
+
+ print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
+ print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
+ print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py
new file mode 100755
index 00000000..8a321bf8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -0,0 +1,650 @@
+## @file
+# The engine for building files
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os
+import re
+import copy
+import string
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+from Common.GlobalData import *
+from Common.BuildToolError import *
+from Common.Misc import tdict, PathClass
+from Common.StringUtils import NormPath
+from Common.DataType import *
+from Common.TargetTxtClassObject import TargetTxtDict
+gDefaultBuildRuleFile = 'build_rule.txt'
+AutoGenReqBuildRuleVerNum = '0.1'
+
+import Common.EdkLogger as EdkLogger
+
+## Convert file type to file list macro name
+#
+# @param FileType The name of file type
+#
+# @retval string The name of macro
+#
+def FileListMacro(FileType):
+ return "%sS" % FileType.replace("-", "_").upper()
+
+## Convert file type to list file macro name
+#
+# @param FileType The name of file type
+#
+# @retval string The name of macro
+#
+def ListFileMacro(FileType):
+ return "%s_LIST" % FileListMacro(FileType)
+
+class TargetDescBlock(object):
+ def __init__(self, Inputs, Outputs, Commands, Dependencies):
+ self.InitWorker(Inputs, Outputs, Commands, Dependencies)
+
+ def InitWorker(self, Inputs, Outputs, Commands, Dependencies):
+ self.Inputs = Inputs
+ self.Outputs = Outputs
+ self.Commands = Commands
+ self.Dependencies = Dependencies
+ if self.Outputs:
+ self.Target = self.Outputs[0]
+ else:
+ self.Target = None
+
+ def __str__(self):
+ return self.Target.Path
+
+ def __hash__(self):
+ return hash(self.Target.Path)
+
+ def __eq__(self, Other):
+ if isinstance(Other, type(self)):
+ return Other.Target.Path == self.Target.Path
+ else:
+ return str(Other) == self.Target.Path
+
+ def AddInput(self, Input):
+ if Input not in self.Inputs:
+ self.Inputs.append(Input)
+
+ def IsMultipleInput(self):
+ return len(self.Inputs) > 1
+
+## Class for one build rule
+#
+# This represents a build rule which can give out corresponding command list for
+# building the given source file(s). The result can be used for generating the
+# target for makefile.
+#
+class FileBuildRule:
+ INC_LIST_MACRO = "INC_LIST"
+ INC_MACRO = "INC"
+
+ ## constructor
+ #
+    # @param Type The file type that the rule applies to
+    # @param Input The dictionary representing input file(s) for a rule
+    # @param Output The list representing output file(s) for a rule
+    # @param Command The list containing commands to generate the output from input
+    # @param ExtraDependency Optional list of extra files the output also depends on
+ #
+ def __init__(self, Type, Input, Output, Command, ExtraDependency=None):
+ # The Input should not be empty
+ if not Input:
+ Input = []
+ if not Output:
+ Output = []
+ if not Command:
+ Command = []
+
+ self.FileListMacro = FileListMacro(Type)
+ self.ListFileMacro = ListFileMacro(Type)
+ self.IncListFileMacro = self.INC_LIST_MACRO
+
+ self.SourceFileType = Type
+        # source files not listed in TAB_STAR or "?" pattern format
+ if not ExtraDependency:
+ self.ExtraSourceFileList = []
+ else:
+ self.ExtraSourceFileList = ExtraDependency
+
+ #
+ # Search macros used in command lines for <FILE_TYPE>_LIST and INC_LIST.
+ # If found, generate a file to keep the input files used to get over the
+ # limitation of command line length
+ #
+ self.MacroList = []
+ self.CommandList = []
+ for CmdLine in Command:
+ self.MacroList.extend(gMacroRefPattern.findall(CmdLine))
+ # replace path separator with native one
+ self.CommandList.append(CmdLine)
+
+ # Indicate what should be generated
+ if self.FileListMacro in self.MacroList:
+ self.GenFileListMacro = True
+ else:
+ self.GenFileListMacro = False
+
+ if self.ListFileMacro in self.MacroList:
+ self.GenListFile = True
+ self.GenFileListMacro = True
+ else:
+ self.GenListFile = False
+
+ if self.INC_LIST_MACRO in self.MacroList:
+ self.GenIncListFile = True
+ else:
+ self.GenIncListFile = False
+
+ # Check input files
+ self.IsMultipleInput = False
+ self.SourceFileExtList = set()
+ for File in Input:
+ Base, Ext = os.path.splitext(File)
+ if Base.find(TAB_STAR) >= 0:
+ # There's TAB_STAR in the file name
+ self.IsMultipleInput = True
+ self.GenFileListMacro = True
+ elif Base.find("?") < 0:
+ # There's no TAB_STAR and "?" in file name
+ self.ExtraSourceFileList.append(File)
+ continue
+ self.SourceFileExtList.add(Ext)
+
+ # Check output files
+ self.DestFileList = []
+ for File in Output:
+ self.DestFileList.append(File)
+
+ # All build targets generated by this rule for a module
+ self.BuildTargets = {}
+
+ ## str() function support
+ #
+ # @retval string
+ #
+ def __str__(self):
+ SourceString = ""
+ SourceString += " %s %s %s" % (self.SourceFileType, " ".join(self.SourceFileExtList), self.ExtraSourceFileList)
+ DestString = ", ".join([str(i) for i in self.DestFileList])
+ CommandString = "\n\t".join(self.CommandList)
+ return "%s : %s\n\t%s" % (DestString, SourceString, CommandString)
+
+ def Instantiate(self, Macros = None):
+ if Macros is None:
+ Macros = {}
+ NewRuleObject = copy.copy(self)
+ NewRuleObject.BuildTargets = {}
+ NewRuleObject.DestFileList = []
+ for File in self.DestFileList:
+ NewRuleObject.DestFileList.append(PathClass(NormPath(File, Macros)))
+ return NewRuleObject
+
+ ## Apply the rule to given source file(s)
+ #
+ # @param SourceFile One file or a list of files to be built
+    # @param BuildRuleOrder The ordered list of file extensions used to decide
+    #                       when a target's command line must be regenerated
+    #
+    # @retval TargetDescBlock The target description for the given source file(s),
+    #                         or None if the rule has no command or no output
+ #
+ def Apply(self, SourceFile, BuildRuleOrder=None):
+ if not self.CommandList or not self.DestFileList:
+ return None
+
+ # source file
+ if self.IsMultipleInput:
+ SrcFileName = ""
+ SrcFileBase = ""
+ SrcFileExt = ""
+ SrcFileDir = ""
+ SrcPath = ""
+ # SourceFile must be a list
+ SrcFile = "$(%s)" % self.FileListMacro
+ else:
+ SrcFileName, SrcFileBase, SrcFileExt = SourceFile.Name, SourceFile.BaseName, SourceFile.Ext
+ if SourceFile.Root:
+ SrcFileDir = SourceFile.SubDir
+ if SrcFileDir == "":
+ SrcFileDir = "."
+ else:
+ SrcFileDir = "."
+ SrcFile = SourceFile.Path
+ SrcPath = SourceFile.Dir
+
+ # destination file (the first one)
+ if self.DestFileList:
+ DestFile = self.DestFileList[0].Path
+ DestPath = self.DestFileList[0].Dir
+ DestFileName = self.DestFileList[0].Name
+ DestFileBase, DestFileExt = self.DestFileList[0].BaseName, self.DestFileList[0].Ext
+ else:
+ DestFile = ""
+ DestPath = ""
+ DestFileName = ""
+ DestFileBase = ""
+ DestFileExt = ""
+
+ BuildRulePlaceholderDict = {
+ # source file
+ "src" : SrcFile,
+ "s_path" : SrcPath,
+ "s_dir" : SrcFileDir,
+ "s_name" : SrcFileName,
+ "s_base" : SrcFileBase,
+ "s_ext" : SrcFileExt,
+ # destination file
+ "dst" : DestFile,
+ "d_path" : DestPath,
+ "d_name" : DestFileName,
+ "d_base" : DestFileBase,
+ "d_ext" : DestFileExt,
+ }
+
+ DstFile = []
+ for File in self.DestFileList:
+ File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
+ File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
+ DstFile.append(PathClass(File, IsBinary=True))
+
+ if DstFile[0] in self.BuildTargets:
+ TargetDesc = self.BuildTargets[DstFile[0]]
+ if BuildRuleOrder and SourceFile.Ext in BuildRuleOrder:
+ Index = BuildRuleOrder.index(SourceFile.Ext)
+ for Input in TargetDesc.Inputs:
+ if Input.Ext not in BuildRuleOrder or BuildRuleOrder.index(Input.Ext) > Index:
+ #
+ # Command line should be regenerated since some macros are different
+ #
+ CommandList = self._BuildCommand(BuildRulePlaceholderDict)
+ TargetDesc.InitWorker([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
+ break
+ else:
+ TargetDesc.AddInput(SourceFile)
+ else:
+ CommandList = self._BuildCommand(BuildRulePlaceholderDict)
+ TargetDesc = TargetDescBlock([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
+ TargetDesc.ListFileMacro = self.ListFileMacro
+ TargetDesc.FileListMacro = self.FileListMacro
+ TargetDesc.IncListFileMacro = self.IncListFileMacro
+ TargetDesc.GenFileListMacro = self.GenFileListMacro
+ TargetDesc.GenListFile = self.GenListFile
+ TargetDesc.GenIncListFile = self.GenIncListFile
+ self.BuildTargets[DstFile[0]] = TargetDesc
+ return TargetDesc
+
+ def _BuildCommand(self, Macros):
+ CommandList = []
+ for CommandString in self.CommandList:
+ CommandString = string.Template(CommandString).safe_substitute(Macros)
+ CommandString = string.Template(CommandString).safe_substitute(Macros)
+ CommandList.append(CommandString)
+ return CommandList
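+
+    # A minimal substitution sketch (values illustrative): placeholders are
+    # resolved with string.Template, and safe_substitute() is applied twice so
+    # that macros which expand to further ${...} placeholders are resolved too:
+    #
+    #   import string
+    #   Macros = {"src": "${s_dir}/main.c", "s_dir": "IA32", "dst": "main.obj"}
+    #   Cmd = string.Template("$(CC) ${src} -o ${dst}").safe_substitute(Macros)
+    #   Cmd = string.Template(Cmd).safe_substitute(Macros)
+    #   # Cmd is now "$(CC) IA32/main.c -o main.obj"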
+
+## Class for build rules
+#
+# The BuildRule class parses rules defined in a file or passed in by the caller,
+# and converts each rule into a FileBuildRule object.
+#
+class BuildRule:
+ _SectionHeader = "SECTIONHEADER"
+ _Section = "SECTION"
+ _SubSectionHeader = "SUBSECTIONHEADER"
+ _SubSection = "SUBSECTION"
+ _InputFile = "INPUTFILE"
+ _OutputFile = "OUTPUTFILE"
+ _ExtraDependency = "EXTRADEPENDENCY"
+ _Command = "COMMAND"
+ _UnknownSection = "UNKNOWNSECTION"
+
+ _SubSectionList = [_InputFile, _OutputFile, _Command]
+
+ _PATH_SEP = "(+)"
+    _FileTypePattern = re.compile(r"^[_a-zA-Z][_\-0-9a-zA-Z]*$")
+ _BinaryFileRule = FileBuildRule(TAB_DEFAULT_BINARY_FILE, [], [os.path.join("$(OUTPUT_DIR)", "${s_name}")],
+ ["$(CP) ${src} ${dst}"], [])
+
+ ## Constructor
+ #
+ # @param File The file containing build rules in a well defined format
+ # @param Content The string list of build rules in a well defined format
+ # @param LineIndex The line number from which the parsing will begin
+ # @param SupportedFamily The list of supported tool chain families
+ #
+ def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=[TAB_COMPILER_MSFT, "INTEL", "GCC", "RVCT"]):
+ self.RuleFile = File
+        # Read build rules from the file if one is given
+        if File is not None:
+            try:
+                with open(File, 'r') as RuleFileHandle:
+                    self.RuleContent = RuleFileHandle.readlines()
+            except:
+                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
+ elif Content is not None:
+ self.RuleContent = Content
+ else:
+ EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
+
+ self.SupportedToolChainFamilyList = SupportedFamily
+ self.RuleDatabase = tdict(True, 4) # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
+ self.Ext2FileType = {} # {ext : file-type}
+ self.FileTypeList = set()
+
+ self._LineIndex = LineIndex
+ self._State = ""
+ self._RuleInfo = tdict(True, 2) # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
+ self._FileType = ''
+ self._BuildTypeList = set()
+ self._ArchList = set()
+ self._FamilyList = []
+ self._TotalToolChainFamilySet = set()
+ self._RuleObjectList = [] # FileBuildRule object list
+ self._FileVersion = ""
+
+ self.Parse()
+
+ # some intrinsic rules
+ self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, TAB_COMMON, TAB_COMMON, TAB_COMMON] = self._BinaryFileRule
+ self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
+
+ ## Parse the build rule strings
+ def Parse(self):
+ self._State = self._Section
+ for Index in range(self._LineIndex, len(self.RuleContent)):
+ # Clean up the line and replace path separator with native one
+ Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep)
+ self.RuleContent[Index] = Line
+
+ # find the build_rule_version
+ if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) != -1:
+ if Line.find("=") != -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
+ self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]
+ # skip empty or comment line
+ if Line == "" or Line[0] == "#":
+ continue
+
+ # find out section header, enclosed by []
+ if Line[0] == '[' and Line[-1] == ']':
+ # merge last section information into rule database
+ self.EndOfSection()
+ self._State = self._SectionHeader
+ # find out sub-section header, enclosed by <>
+ elif Line[0] == '<' and Line[-1] == '>':
+ if self._State != self._UnknownSection:
+ self._State = self._SubSectionHeader
+
+ # call section handler to parse each (sub)section
+ self._StateHandler[self._State](self, Index)
+ # merge last section information into rule database
+ self.EndOfSection()
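+
+    # A minimal sketch of the rule text Parse() accepts (file type, paths and
+    # commands are illustrative; "(+)" is replaced with the native path
+    # separator):
+    #
+    #   [C-Code-File.COMMON.COMMON]
+    #       <InputFile>
+    #           ?.c
+    #       <OutputFile>
+    #           $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
+    #       <Command.GCC>
+    #           "$(CC)" $(CC_FLAGS) -c -o ${dst} ${src}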
+
+ ## Parse definitions under a section
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSection(self, LineIndex):
+ pass
+
+ ## Parse definitions under a subsection
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSubSection(self, LineIndex):
+ # currently nothing here
+ pass
+
+ ## Placeholder for not supported sections
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def SkipSection(self, LineIndex):
+ pass
+
+ ## Merge section information just got into rule database
+ def EndOfSection(self):
+ Database = self.RuleDatabase
+        # if a specific toolchain family is given, 'COMMON' no longer applies
+ if len(self._TotalToolChainFamilySet) > 1 and TAB_COMMON in self._TotalToolChainFamilySet:
+ self._TotalToolChainFamilySet.remove(TAB_COMMON)
+ for Family in self._TotalToolChainFamilySet:
+ Input = self._RuleInfo[Family, self._InputFile]
+ Output = self._RuleInfo[Family, self._OutputFile]
+ Command = self._RuleInfo[Family, self._Command]
+ ExtraDependency = self._RuleInfo[Family, self._ExtraDependency]
+
+ BuildRule = FileBuildRule(self._FileType, Input, Output, Command, ExtraDependency)
+ for BuildType in self._BuildTypeList:
+ for Arch in self._ArchList:
+ Database[self._FileType, BuildType, Arch, Family] = BuildRule
+ for FileExt in BuildRule.SourceFileExtList:
+ self.Ext2FileType[FileExt] = self._FileType
+
+ ## Parse section header
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSectionHeader(self, LineIndex):
+ self._RuleInfo = tdict(True, 2)
+ self._BuildTypeList = set()
+ self._ArchList = set()
+ self._FamilyList = []
+ self._TotalToolChainFamilySet = set()
+ FileType = ''
+ RuleNameList = self.RuleContent[LineIndex][1:-1].split(',')
+ for RuleName in RuleNameList:
+ Arch = TAB_COMMON
+ BuildType = TAB_COMMON
+ TokenList = [Token.strip().upper() for Token in RuleName.split('.')]
+ # old format: Build.File-Type
+ if TokenList[0] == "BUILD":
+ if len(TokenList) == 1:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+
+ FileType = TokenList[1]
+ if FileType == '':
+ EdkLogger.error("build", FORMAT_INVALID, "No file type given",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if self._FileTypePattern.match(FileType) is None:
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
+ # new format: File-Type.Build-Type.Arch
+ else:
+ if FileType == '':
+ FileType = TokenList[0]
+ elif FileType != TokenList[0]:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Different file types are not allowed in the same rule section",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if len(TokenList) > 1:
+ BuildType = TokenList[1]
+ if len(TokenList) > 2:
+ Arch = TokenList[2]
+ self._BuildTypeList.add(BuildType)
+ self._ArchList.add(Arch)
+
+ if TAB_COMMON in self._BuildTypeList and len(self._BuildTypeList) > 1:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Specific build types must not be mixed with common one",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if TAB_COMMON in self._ArchList and len(self._ArchList) > 1:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Specific ARCH must not be mixed with common one",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+
+ self._FileType = FileType
+ self._State = self._Section
+ self.FileTypeList.add(FileType)
+
+ ## Parse sub-section header
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSubSectionHeader(self, LineIndex):
+ SectionType = ""
+ List = self.RuleContent[LineIndex][1:-1].split(',')
+ FamilyList = []
+ for Section in List:
+ TokenList = Section.split('.')
+ Type = TokenList[0].strip().upper()
+
+ if SectionType == "":
+ SectionType = Type
+ elif SectionType != Type:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Two different section types are not allowed in the same sub-section",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+
+ if len(TokenList) > 1:
+ Family = TokenList[1].strip().upper()
+ else:
+ Family = TAB_COMMON
+
+ if Family not in FamilyList:
+ FamilyList.append(Family)
+
+ self._FamilyList = FamilyList
+ self._TotalToolChainFamilySet.update(FamilyList)
+ self._State = SectionType.upper()
+ if TAB_COMMON in FamilyList and len(FamilyList) > 1:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Specific tool chain family should not be mixed with general one",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if self._State not in self._StateHandler:
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])
+ ## Parse <InputFile> sub-section
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseInputFileSubSection(self, LineIndex):
+ FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
+ for ToolChainFamily in self._FamilyList:
+ if self._RuleInfo[ToolChainFamily, self._State] is None:
+ self._RuleInfo[ToolChainFamily, self._State] = []
+ self._RuleInfo[ToolChainFamily, self._State].extend(FileList)
+
+ ## Parse <ExtraDependency> sub-section
+ ## Parse <OutputFile> sub-section
+ ## Parse <Command> sub-section
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseCommonSubSection(self, LineIndex):
+ for ToolChainFamily in self._FamilyList:
+ if self._RuleInfo[ToolChainFamily, self._State] is None:
+ self._RuleInfo[ToolChainFamily, self._State] = []
+ self._RuleInfo[ToolChainFamily, self._State].append(self.RuleContent[LineIndex])
+
+    ## Get a build rule via the [] operator
+    #
+    #   @param FileExt          The extension of a file, or a file type name
+    #   @param ModuleType       The module type the rule applies to
+    #   @param Arch             The architecture the rule applies to
+    #   @param ToolChainFamily  The tool chain family name
+    #
+    #   @retval FileType        The file type string
+    #   @retval FileBuildRule   The object of FileBuildRule
+    #
+    # Key = (FileExt, ModuleType, Arch, ToolChainFamily)
+ def __getitem__(self, Key):
+ if not Key:
+ return None
+
+ if Key[0] in self.Ext2FileType:
+ Type = self.Ext2FileType[Key[0]]
+ elif Key[0].upper() in self.FileTypeList:
+ Type = Key[0].upper()
+ else:
+ return None
+
+ if len(Key) > 1:
+ Key = (Type,) + Key[1:]
+ else:
+ Key = (Type,)
+ return self.RuleDatabase[Key]
+
+ _StateHandler = {
+ _SectionHeader : ParseSectionHeader,
+ _Section : ParseSection,
+ _SubSectionHeader : ParseSubSectionHeader,
+ _SubSection : ParseSubSection,
+ _InputFile : ParseInputFileSubSection,
+ _OutputFile : ParseCommonSubSection,
+ _ExtraDependency : ParseCommonSubSection,
+ _Command : ParseCommonSubSection,
+ _UnknownSection : SkipSection,
+ }
+
+class ToolBuildRule():
+
+ def __new__(cls, *args, **kw):
+ if not hasattr(cls, '_instance'):
+ orig = super(ToolBuildRule, cls)
+ cls._instance = orig.__new__(cls, *args, **kw)
+ return cls._instance
+
+ def __init__(self):
+        # Check the backing attribute directly; probing the 'ToolBuildRule'
+        # property here would trigger its getter and parse the rule file early.
+        if not hasattr(self, '_ToolBuildRule'):
+            self._ToolBuildRule = None
+
+ @property
+ def ToolBuildRule(self):
+ if not self._ToolBuildRule:
+ self._GetBuildRule()
+ return self._ToolBuildRule
+
+ def _GetBuildRule(self):
+ BuildRuleFile = None
+ TargetObj = TargetTxtDict()
+ TargetTxt = TargetObj.Target
+ if TAB_TAT_DEFINES_BUILD_RULE_CONF in TargetTxt.TargetTxtDictionary:
+ BuildRuleFile = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
+ if not BuildRuleFile:
+ BuildRuleFile = gDefaultBuildRuleFile
+ RetVal = BuildRule(BuildRuleFile)
+ if RetVal._FileVersion == "":
+ RetVal._FileVersion = AutoGenReqBuildRuleVerNum
+ else:
+            if RetVal._FileVersion < AutoGenReqBuildRuleVerNum:
+                # If the build rule's version is lower than the version required by the tools, halt the build.
+                EdkLogger.error("build", AUTOGEN_ERROR,
+                                ExtraData="The version number [%s] of build_rule.txt is less than the version number required by AutoGen (the minimum required version number is [%s])"\
+                                % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
+ self._ToolBuildRule = RetVal
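+
+    # Usage sketch: ToolBuildRule is a singleton, so the build-rule file is
+    # parsed at most once per process no matter how often it is constructed:
+    #
+    #   RuleObjA = ToolBuildRule().ToolBuildRule
+    #   RuleObjB = ToolBuildRule().ToolBuildRule
+    #   assert RuleObjA is RuleObjB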
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ import sys
+ EdkLogger.Initialize()
+ if len(sys.argv) > 1:
+ Br = BuildRule(sys.argv[1])
+ print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", TAB_COMPILER_MSFT][1]))
+ print()
+ print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", "INTEL"][1]))
+ print()
+ print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", "GCC"][1]))
+ print()
+ print(str(Br[".ac", "ACPI_TABLE", "IA32", TAB_COMPILER_MSFT][1]))
+ print()
+ print(str(Br[".h", "ACPI_TABLE", "IA32", "INTEL"][1]))
+ print()
+ print(str(Br[".ac", "ACPI_TABLE", "IA32", TAB_COMPILER_MSFT][1]))
+ print()
+ print(str(Br[".s", SUP_MODULE_SEC, "IPF", "COMMON"][1]))
+ print()
+ print(str(Br[".s", SUP_MODULE_SEC][1]))
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py
new file mode 100755
index 00000000..d5028f39
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py
@@ -0,0 +1,169 @@
+## @file
+# Data pipe for passing platform and module build data to AutoGen worker processes
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from Workspace.WorkspaceDatabase import BuildDB
+from Workspace.WorkspaceCommon import GetModuleLibInstances
+import Common.GlobalData as GlobalData
+import os
+import pickle
+from pickle import HIGHEST_PROTOCOL
+from Common import EdkLogger
+
+class PCD_DATA():
+ def __init__(self,TokenCName,TokenSpaceGuidCName,Type,DatumType,SkuInfoList,DefaultValue,
+ MaxDatumSize,UserDefinedDefaultStoresFlag,validateranges,
+ validlists,expressions,CustomAttribute,TokenValue):
+ self.TokenCName = TokenCName
+ self.TokenSpaceGuidCName = TokenSpaceGuidCName
+ self.Type = Type
+ self.DatumType = DatumType
+ self.SkuInfoList = SkuInfoList
+ self.DefaultValue = DefaultValue
+ self.MaxDatumSize = MaxDatumSize
+ self.UserDefinedDefaultStoresFlag = UserDefinedDefaultStoresFlag
+ self.validateranges = validateranges
+ self.validlists = validlists
+ self.expressions = expressions
+ self.CustomAttribute = CustomAttribute
+ self.TokenValue = TokenValue
+
+class DataPipe(object):
+ def __init__(self, BuildDir=None):
+ self.data_container = {}
+ self.BuildDir = BuildDir
+ self.dump_file = ""
+
+class MemoryDataPipe(DataPipe):
+
+ def Get(self,key):
+ return self.data_container.get(key)
+
+ def dump(self,file_path):
+ self.dump_file = file_path
+ with open(file_path,'wb') as fd:
+ pickle.dump(self.data_container,fd,pickle.HIGHEST_PROTOCOL)
+
+ def load(self,file_path):
+ with open(file_path,'rb') as fd:
+ self.data_container = pickle.load(fd)
+
+ @property
+ def DataContainer(self):
+ return self.data_container
+ @DataContainer.setter
+ def DataContainer(self,data):
+ self.data_container.update(data)
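+
+    # Usage sketch (paths illustrative): the container round-trips through
+    # pickle, so a separate AutoGen worker process can rehydrate the state:
+    #
+    #   Pipe = MemoryDataPipe(BuildDir="Build")
+    #   Pipe.DataContainer = {"P_Info": {"Target": "DEBUG"}}
+    #   Pipe.dump(os.path.join("Build", "GlobalVar.bin"))
+    #   Worker = MemoryDataPipe()
+    #   Worker.load(os.path.join("Build", "GlobalVar.bin"))
+    #   assert Worker.Get("P_Info")["Target"] == "DEBUG"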
+
+ def FillData(self,PlatformInfo):
+ #Platform Pcds
+ self.DataContainer = {
+ "PLA_PCD" : [PCD_DATA(
+ pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
+ pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
+ pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
+ pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
+ for pcd in PlatformInfo.Platform.Pcds.values()]
+ }
+
+ #Platform Module Pcds
+ ModulePcds = {}
+ for m in PlatformInfo.Platform.Modules:
+ module = PlatformInfo.Platform.Modules[m]
+ m_pcds = module.Pcds
+ if m_pcds:
+ ModulePcds[module.Guid] = [PCD_DATA(
+ pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
+ pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
+ pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
+ pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
+                    for pcd in m_pcds.values()]
+
+
+ self.DataContainer = {"MOL_PCDS":ModulePcds}
+
+ #Module's Library Instance
+ ModuleLibs = {}
+ libModules = {}
+ for m in PlatformInfo.Platform.Modules:
+ module_obj = BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain]
+ Libs = GetModuleLibInstances(module_obj, PlatformInfo.Platform, BuildDB.BuildObject, PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain,PlatformInfo.MetaFile,EdkLogger)
+            for lib in Libs:
+                LibKey = (lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)
+                libModules.setdefault(LibKey, []).append((m.File,m.Root,module_obj.Arch,m.Path))
+ ModuleLibs[(m.File,m.Root,module_obj.Arch,m.Path)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch,l.MetaFile.Path) for l in Libs]
+ self.DataContainer = {"DEPS":ModuleLibs}
+ self.DataContainer = {"REFS":libModules}
+
+ #Platform BuildOptions
+
+ platform_build_opt = PlatformInfo.EdkIIBuildOption
+
+ ToolDefinition = PlatformInfo.ToolDefinition
+ module_build_opt = {}
+ for m in PlatformInfo.Platform.Modules:
+ ModuleTypeOptions, PlatformModuleOptions = PlatformInfo.GetGlobalBuildOptions(BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain])
+ if ModuleTypeOptions or PlatformModuleOptions:
+ module_build_opt.update({(m.File,m.Root): {"ModuleTypeOptions":ModuleTypeOptions, "PlatformModuleOptions":PlatformModuleOptions}})
+
+ self.DataContainer = {"PLA_BO":platform_build_opt,
+ "TOOLDEF":ToolDefinition,
+ "MOL_BO":module_build_opt
+ }
+
+
+
+ #Platform Info
+ PInfo = {
+ "WorkspaceDir":PlatformInfo.Workspace.WorkspaceDir,
+ "Target":PlatformInfo.BuildTarget,
+ "ToolChain":PlatformInfo.Workspace.ToolChain,
+ "BuildRuleFile":PlatformInfo.BuildRule,
+ "Arch": PlatformInfo.Arch,
+ "ArchList":PlatformInfo.Workspace.ArchList,
+ "ActivePlatform":PlatformInfo.MetaFile
+ }
+ self.DataContainer = {'P_Info':PInfo}
+
+ self.DataContainer = {'M_Name':PlatformInfo.UniqueBaseName}
+
+ self.DataContainer = {"ToolChainFamily": PlatformInfo.ToolChainFamily}
+
+ self.DataContainer = {"BuildRuleFamily": PlatformInfo.BuildRuleFamily}
+
+ self.DataContainer = {"MixedPcd":GlobalData.MixedPcd}
+
+ self.DataContainer = {"BuildOptPcd":GlobalData.BuildOptionPcd}
+
+ self.DataContainer = {"BuildCommand": PlatformInfo.BuildCommand}
+
+ self.DataContainer = {"AsBuildModuleList": PlatformInfo._AsBuildModuleList}
+
+ self.DataContainer = {"G_defines": GlobalData.gGlobalDefines}
+
+ self.DataContainer = {"CL_defines": GlobalData.gCommandLineDefines}
+
+ self.DataContainer = {"Env_Var": {k:v for k, v in os.environ.items()}}
+
+ self.DataContainer = {"PackageList": [(dec.MetaFile,dec.Arch) for dec in PlatformInfo.PackageList]}
+
+ self.DataContainer = {"GuidDict": PlatformInfo.Platform._GuidDict}
+
+ self.DataContainer = {"DatabasePath":GlobalData.gDatabasePath}
+
+ self.DataContainer = {"FdfParser": True if GlobalData.gFdfParser else False}
+
+ self.DataContainer = {"LogLevel": EdkLogger.GetLevel()}
+
+ self.DataContainer = {"UseHashCache":GlobalData.gUseHashCache}
+
+ self.DataContainer = {"BinCacheSource":GlobalData.gBinCacheSource}
+
+ self.DataContainer = {"BinCacheDest":GlobalData.gBinCacheDest}
+
+ self.DataContainer = {"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py
new file mode 100755
index 00000000..7c7a7d53
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py
@@ -0,0 +1,2111 @@
+## @file
+# Routines for generating AutoGen.h and AutoGen.c
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import absolute_import
+import string
+import collections
+import struct
+from Common import EdkLogger
+from Common import GlobalData
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+from Common.StringUtils import StringToArray
+from .StrGather import *
+from .GenPcdDb import CreatePcdDatabaseCode
+from .IdfClassObject import *
+
+## PCD type string
+gItemTypeStringDatabase = {
+ TAB_PCDS_FEATURE_FLAG : TAB_PCDS_FIXED_AT_BUILD,
+ TAB_PCDS_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE: 'BinaryPatch',
+ TAB_PCDS_DYNAMIC : '',
+ TAB_PCDS_DYNAMIC_DEFAULT : '',
+ TAB_PCDS_DYNAMIC_VPD : '',
+ TAB_PCDS_DYNAMIC_HII : '',
+ TAB_PCDS_DYNAMIC_EX : '',
+ TAB_PCDS_DYNAMIC_EX_DEFAULT : '',
+ TAB_PCDS_DYNAMIC_EX_VPD : '',
+ TAB_PCDS_DYNAMIC_EX_HII : '',
+}
+
+
+## Datum size
+gDatumSizeStringDatabase = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'BOOLEAN',TAB_VOID:'8'}
+gDatumSizeStringDatabaseH = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'BOOL',TAB_VOID:'PTR'}
+gDatumSizeStringDatabaseLib = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'Bool',TAB_VOID:'Ptr'}
+
+## AutoGen File Header Templates
+gAutoGenHeaderString = TemplateString("""\
+/**
+ DO NOT EDIT
+ FILE auto-generated
+ Module name:
+ ${FileName}
+ Abstract: Auto-generated ${FileName} for building module or library.
+**/
+""")
+
+gAutoGenHPrologueString = TemplateString("""
+#ifndef _${File}_${Guid}
+#define _${File}_${Guid}
+
+""")
+
+gAutoGenHCppPrologueString = """\
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+"""
+
+gAutoGenHEpilogueString = """
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+"""
+
+## PEI Core Entry Point Templates
+gPeiCoreEntryPointPrototype = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+${Function} (
+ IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
+ IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
+ IN VOID *Context
+ );
+${END}
+""")
+
+gPeiCoreEntryPointString = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+ProcessModuleEntryPointList (
+ IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
+ IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
+ IN VOID *Context
+ )
+
+{
+ ${Function} (SecCoreData, PpiList, Context);
+}
+${END}
+""")
+
+
+## DXE Core Entry Point Templates
+gDxeCoreEntryPointPrototype = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+${Function} (
+ IN VOID *HobStart
+ );
+${END}
+""")
+
+gDxeCoreEntryPointString = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+ProcessModuleEntryPointList (
+ IN VOID *HobStart
+ )
+
+{
+ ${Function} (HobStart);
+}
+${END}
+""")
+
+## PEIM Entry Point Templates
+gPeimEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ );
+${END}
+""")
+
+gPeimEntryPointString = [
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+
+{
+ return ${Function} (FileHandle, PeiServices);
+}
+${END}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+
+{
+ EFI_STATUS Status;
+ EFI_STATUS CombinedStatus;
+
+ CombinedStatus = EFI_LOAD_ERROR;
+${BEGIN}
+ Status = ${Function} (FileHandle, PeiServices);
+ if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
+ CombinedStatus = Status;
+ }
+${END}
+ return CombinedStatus;
+}
+""")
+]
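+
+# The three templates above correspond to modules with zero, one, or multiple
+# PEIM entry points. With multiple entry points, CombinedStatus keeps a success
+# once any entry point succeeds; otherwise it reports the last error returned.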
+
+## SMM_CORE Entry Point Templates
+gSmmCoreEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gSmmCoreEntryPointString = TemplateString("""
+${BEGIN}
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ return ${Function} (ImageHandle, SystemTable);
+}
+${END}
+""")
+
+## MM_CORE_STANDALONE Entry Point Templates
+gMmCoreStandaloneEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN VOID *HobStart
+ );
+${END}
+""")
+
+gMmCoreStandaloneEntryPointString = TemplateString("""
+${BEGIN}
+const UINT32 _gMmRevision = ${PiSpecVersion};
+
+VOID
+EFIAPI
+ProcessModuleEntryPointList (
+ IN VOID *HobStart
+ )
+{
+ ${Function} (HobStart);
+}
+${END}
+""")
+
+## MM_STANDALONE Entry Point Templates
+gMmStandaloneEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ );
+${END}
+""")
+
+gMmStandaloneEntryPointString = [
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+
+{
+ return ${Function} (ImageHandle, MmSystemTable);
+}
+${END}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+
+{
+ EFI_STATUS Status;
+ EFI_STATUS CombinedStatus;
+
+ CombinedStatus = EFI_LOAD_ERROR;
+${BEGIN}
+ Status = ${Function} (ImageHandle, MmSystemTable);
+ if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
+ CombinedStatus = Status;
+ }
+${END}
+ return CombinedStatus;
+}
+""")
+]
+
+## DXE SMM Entry Point Templates
+gDxeSmmEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gDxeSmmEntryPointString = [
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
+static EFI_STATUS mDriverEntryPointStatus;
+
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
+ mDriverEntryPointStatus = Status;
+ }
+ LongJump (&mJumpContext, (UINTN)-1);
+ ASSERT (FALSE);
+}
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ mDriverEntryPointStatus = EFI_LOAD_ERROR;
+
+${BEGIN}
+ if (SetJump (&mJumpContext) == 0) {
+ ExitDriver (${Function} (ImageHandle, SystemTable));
+ ASSERT (FALSE);
+ }
+${END}
+
+ return mDriverEntryPointStatus;
+}
+""")
+]
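+
+# The second template above uses SetJump()/LongJump() so that ExitDriver() can
+# unwind back into ProcessModuleEntryPointList() instead of returning to its
+# caller; the recorded status follows the same keep-first-success rule as the
+# PEIM templates.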
+
+## UEFI Driver Entry Point Templates
+gUefiDriverEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gUefiDriverEntryPointString = [
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+
+{
+ return ${Function} (ImageHandle, SystemTable);
+}
+${END}
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (EFI_ERROR (Status)) {
+ ProcessLibraryDestructorList (gImageHandle, gST);
+ }
+ gBS->Exit (gImageHandle, Status, 0, NULL);
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
+static EFI_STATUS mDriverEntryPointStatus;
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ mDriverEntryPointStatus = EFI_LOAD_ERROR;
+ ${BEGIN}
+ if (SetJump (&mJumpContext) == 0) {
+ ExitDriver (${Function} (ImageHandle, SystemTable));
+ ASSERT (FALSE);
+ }
+ ${END}
+ return mDriverEntryPointStatus;
+}
+
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
+ mDriverEntryPointStatus = Status;
+ }
+ LongJump (&mJumpContext, (UINTN)-1);
+ ASSERT (FALSE);
+}
+""")
+]
+
+
+## UEFI Application Entry Point Templates
+gUefiApplicationEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gUefiApplicationEntryPointString = [
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+
+{
+ return ${Function} (ImageHandle, SystemTable);
+}
+${END}
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (EFI_ERROR (Status)) {
+ ProcessLibraryDestructorList (gImageHandle, gST);
+ }
+ gBS->Exit (gImageHandle, Status, 0, NULL);
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+
+{
+ ${BEGIN}
+ if (SetJump (&mJumpContext) == 0) {
+ ExitDriver (${Function} (ImageHandle, SystemTable));
+ ASSERT (FALSE);
+ }
+ ${END}
+ return mDriverEntryPointStatus;
+}
+
+static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
+static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR;
+
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
+ mDriverEntryPointStatus = Status;
+ }
+ LongJump (&mJumpContext, (UINTN)-1);
+ ASSERT (FALSE);
+}
+""")
+]
+
+## UEFI Unload Image Templates
+gUefiUnloadImagePrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle
+ );
+${END}
+""")
+
+gUefiUnloadImageString = [
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleUnloadList (
+ IN EFI_HANDLE ImageHandle
+ )
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
+
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleUnloadList (
+ IN EFI_HANDLE ImageHandle
+ )
+{
+ return ${Function} (ImageHandle);
+}
+${END}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleUnloadList (
+ IN EFI_HANDLE ImageHandle
+ )
+{
+ EFI_STATUS Status;
+
+ Status = EFI_SUCCESS;
+${BEGIN}
+ if (EFI_ERROR (Status)) {
+ ${Function} (ImageHandle);
+ } else {
+ Status = ${Function} (ImageHandle);
+ }
+${END}
+ return Status;
+}
+""")
+]
+
+gLibraryStructorPrototype = {
+SUP_MODULE_BASE : TemplateString("""${BEGIN}
+RETURN_STATUS
+EFIAPI
+${Function} (
+ VOID
+ );${END}
+"""),
+
+'PEI' : TemplateString("""${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ );${END}
+"""),
+
+'DXE' : TemplateString("""${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );${END}
+"""),
+
+'MM' : TemplateString("""${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ );${END}
+"""),
+}
+
+gLibraryStructorCall = {
+SUP_MODULE_BASE : TemplateString("""${BEGIN}
+ Status = ${Function} ();
+ ASSERT_RETURN_ERROR (Status);${END}
+"""),
+
+'PEI' : TemplateString("""${BEGIN}
+ Status = ${Function} (FileHandle, PeiServices);
+ ASSERT_EFI_ERROR (Status);${END}
+"""),
+
+'DXE' : TemplateString("""${BEGIN}
+ Status = ${Function} (ImageHandle, SystemTable);
+ ASSERT_EFI_ERROR (Status);${END}
+"""),
+
+'MM' : TemplateString("""${BEGIN}
+ Status = ${Function} (ImageHandle, MmSystemTable);
+ ASSERT_EFI_ERROR (Status);${END}
+"""),
+}
+
+## Library Constructor and Destructor Templates
+gLibraryString = {
+SUP_MODULE_BASE : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ VOID
+ )
+{
+${BEGIN} RETURN_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+
+'PEI' : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+{
+${BEGIN} EFI_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+
+'DXE' : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+${BEGIN} EFI_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+
+'MM' : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+{
+${BEGIN} EFI_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+}
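+
+# Illustrative expansion (library names hypothetical): for a DXE module whose
+# libraries declare constructors FooLibConstructor and BarLibConstructor, the
+# 'DXE' templates above expand to a ProcessLibraryConstructorList() whose body
+# essentially performs:
+#
+#   Status = FooLibConstructor (ImageHandle, SystemTable);
+#   ASSERT_EFI_ERROR (Status);
+#   Status = BarLibConstructor (ImageHandle, SystemTable);
+#   ASSERT_EFI_ERROR (Status);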
+
+gBasicHeaderFile = "Base.h"
+
+gModuleTypeHeaderFile = {
+ SUP_MODULE_BASE : [gBasicHeaderFile, "Library/DebugLib.h"],
+ SUP_MODULE_SEC : ["PiPei.h", "Library/DebugLib.h"],
+ SUP_MODULE_PEI_CORE : ["PiPei.h", "Library/DebugLib.h", "Library/PeiCoreEntryPoint.h"],
+ SUP_MODULE_PEIM : ["PiPei.h", "Library/DebugLib.h", "Library/PeimEntryPoint.h"],
+ SUP_MODULE_DXE_CORE : ["PiDxe.h", "Library/DebugLib.h", "Library/DxeCoreEntryPoint.h"],
+ SUP_MODULE_DXE_DRIVER : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_DXE_SMM_DRIVER : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_DXE_RUNTIME_DRIVER: ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_DXE_SAL_DRIVER : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_UEFI_DRIVER : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_UEFI_APPLICATION : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiApplicationEntryPoint.h"],
+ SUP_MODULE_SMM_CORE : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_MM_STANDALONE : ["PiMm.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/StandaloneMmDriverEntryPoint.h"],
+ SUP_MODULE_MM_CORE_STANDALONE : ["PiMm.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/StandaloneMmCoreEntryPoint.h"],
+ SUP_MODULE_USER_DEFINED : [gBasicHeaderFile, "Library/DebugLib.h"],
+ SUP_MODULE_HOST_APPLICATION : [gBasicHeaderFile, "Library/DebugLib.h"]
+}
+
+## Autogen internal worker that defines DynamicEx PCD macros. Each macro name
+# includes both the TokenSpaceGuidName and the TokenName, and a Guid comparison
+# is generated, to avoid #define name collisions.
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenH The TemplateString object for header file
+#
+#
+def DynExPcdTokenNumberMapping(Info, AutoGenH):
+ ExTokenCNameList = []
+ PcdExList = []
+    # Even for a library, the PCDs are saved in ModulePcdList
+ PcdList = Info.ModulePcdList
+ for Pcd in PcdList:
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ ExTokenCNameList.append(Pcd.TokenCName)
+ PcdExList.append(Pcd)
+ if len(ExTokenCNameList) == 0:
+ return
+ AutoGenH.Append('\n#define COMPAREGUID(Guid1, Guid2) (BOOLEAN)(*(CONST UINT64*)Guid1 == *(CONST UINT64*)Guid2 && *((CONST UINT64*)Guid1 + 1) == *((CONST UINT64*)Guid2 + 1))\n')
+ # AutoGen for each PCD listed in a [PcdEx] section of a Module/Lib INF file.
+ # Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
+ # Use the Guid pointer to see if it matches any of the token space GUIDs.
+ TokenCNameList = set()
+ for TokenCName in ExTokenCNameList:
+ if TokenCName in TokenCNameList:
+ continue
+ Index = 0
+ Count = ExTokenCNameList.count(TokenCName)
+ for Pcd in PcdExList:
+ RealTokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ RealTokenCName = PcdItem[0]
+ break
+ if Pcd.TokenCName == TokenCName:
+ Index = Index + 1
+ if Index == 1:
+ AutoGenH.Append('\n#define __PCD_%s_ADDR_CMP(GuidPtr) (' % (RealTokenCName))
+ AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ else:
+ AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ if Index == Count:
+ AutoGenH.Append('0 \\\n )\n')
+ TokenCNameList.add(TokenCName)
+
+ TokenCNameList = set()
+ for TokenCName in ExTokenCNameList:
+ if TokenCName in TokenCNameList:
+ continue
+ Index = 0
+ Count = ExTokenCNameList.count(TokenCName)
+ for Pcd in PcdExList:
+ RealTokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ RealTokenCName = PcdItem[0]
+ break
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == TokenCName:
+ Index = Index + 1
+ if Index == 1:
+ AutoGenH.Append('\n#define __PCD_%s_VAL_CMP(GuidPtr) (' % (RealTokenCName))
+ AutoGenH.Append('\\\n (GuidPtr == NULL) ? 0:')
+ AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ else:
+ AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ if Index == Count:
+ AutoGenH.Append('0 \\\n )\n')
+    # Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID.
+    # Guid2 is a C name for a GUID. Pointers are compared first because an optimizing
+    # compiler can do this at build time for CONST GUID pointers and optimize away the
+    # call to COMPAREGUID(). COMPAREGUID() will only be used if the Guid passed in is
+    # local to the module.
+ AutoGenH.Append('#define _PCD_TOKEN_EX_%s(GuidPtr) __PCD_%s_ADDR_CMP(GuidPtr) ? __PCD_%s_ADDR_CMP(GuidPtr) : __PCD_%s_VAL_CMP(GuidPtr) \n'
+ % (RealTokenCName, RealTokenCName, RealTokenCName, RealTokenCName))
+ TokenCNameList.add(TokenCName)
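+
+# Illustrative generated output (PCD and GUID names hypothetical): for a
+# DynamicEx PCD PcdFoo in token space gFooTokenSpaceGuid, the function above
+# emits macros shaped roughly like:
+#
+#   #define __PCD_PcdFoo_ADDR_CMP(GuidPtr) ( \
+#     (GuidPtr == &gFooTokenSpaceGuid) ? _PCD_TOKEN_gFooTokenSpaceGuid_PcdFoo:0 \
+#     )
+#   #define _PCD_TOKEN_EX_PcdFoo(GuidPtr) __PCD_PcdFoo_ADDR_CMP(GuidPtr) ? \
+#     __PCD_PcdFoo_ADDR_CMP(GuidPtr) : __PCD_PcdFoo_VAL_CMP(GuidPtr)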
+
+## Create code for module PCDs
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param Pcd The PCD object
+#
+def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
+ TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[Pcd.TokenSpaceGuidCName]
+ PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
+ #
+ # Write PCDs
+ #
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ PcdTokenName = '_PCD_TOKEN_' + TokenCName
+ PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
+ PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
+ PatchPcdMaxSizeVariable = '_gPcd_BinaryPatch_MaxSize_' + TokenCName
+ FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
+ FixedPcdSizeVariableName = '_gPcd_FixedAtBuild_Size_' + TokenCName
+
+ if Pcd.PcdValueFromComm:
+ Pcd.DefaultValue = Pcd.PcdValueFromComm
+ elif Pcd.PcdValueFromFdf:
+ Pcd.DefaultValue = Pcd.PcdValueFromFdf
+
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ TokenNumber = int(Pcd.TokenValue, 0)
+ # Add TokenSpaceGuidValue value to PcdTokenName to discriminate the DynamicEx PCDs with
+ # different Guids but same TokenCName
+ PcdExTokenName = '_PCD_TOKEN_' + Pcd.TokenSpaceGuidCName + '_' + TokenCName
+ AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
+ else:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
+            # If one of the source-built modules listed in the DSC is not listed in the FDF
+            # modules, and its INF lists a PCD that can only use the PcdsDynamic access method
+            # (it is only listed in the DEC file that declares the PCD as PcdsDynamic), then
+            # the build tool reports a warning to notify the platform integrator that they are
+            # attempting to build a module that must be included in a flash image in order to
+            # be functional. Such Dynamic PCDs are not added to the database unless they are
+            # used by other modules that are included in the FDF file.
+            # In this case, just assign an invalid token number so the build can pass.
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ TokenNumber = 0
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ else:
+ TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
+ AutoGenH.Append('\n#define %s %dU\n' % (PcdTokenName, TokenNumber))
+
+ EdkLogger.debug(EdkLogger.DEBUG_3, "Creating code for " + TokenCName + "." + Pcd.TokenSpaceGuidCName)
+ if Pcd.Type not in gItemTypeStringDatabase:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+
+ DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabase else gDatumSizeStringDatabase[TAB_VOID]
+ DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabaseLib else gDatumSizeStringDatabaseLib[TAB_VOID]
+ GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
+ GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
+
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if Info.IsLibrary:
+ PcdList = Info.LibraryPcdList
+ else:
+ PcdList = Info.ModulePcdList + Info.LibraryPcdList
+ PcdExCNameTest = 0
+ for PcdModule in PcdList:
+ if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
+ PcdExCNameTest += 1
+            # get out early once we find more than one...
+ if PcdExCNameTest > 1:
+ break
+        # Be compatible with the current code, which uses PcdToken and PcdGet/Set for DynamicEx Pcds.
+        # If PcdToken and PcdGet/Set are only used on Pcds with different CNames, the build should succeed.
+        # If PcdToken and PcdGet/Set are used on Pcds with different Guids but the same CName, the build should fail.
+ if PcdExCNameTest > 1:
+ AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
+ AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ PcdCNameTest = 0
+ for PcdModule in Info.LibraryPcdList + Info.ModulePcdList:
+ if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
+ PcdCNameTest += 1
+            # get out early once we find more than one...
+ if PcdCNameTest > 1:
+ break
+ if PcdCNameTest > 1:
+ EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic Pcds [%s] are different Guids but same CName. They need to be changed to DynamicEx type to avoid the confliction.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
+ else:
+ AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ else:
+ PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
+ Const = 'const'
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ Const = ''
+ Type = ''
+ Array = ''
+ Value = Pcd.DefaultValue
+ Unicode = False
+ ValueNumber = 0
+
+ if Pcd.DatumType == 'BOOLEAN':
+ BoolValue = Value.upper()
+ if BoolValue == 'TRUE' or BoolValue == '1':
+ Value = '1U'
+ elif BoolValue == 'FALSE' or BoolValue == '0':
+ Value = '0U'
+
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ try:
+ if Value.upper().endswith('L'):
+ Value = Value[:-1]
+ if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
+ Value = Value.lstrip('0')
+ ValueNumber = int (Value, 0)
+ except:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if ValueNumber < 0:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ elif ValueNumber > MAX_VAL_TYPE[Pcd.DatumType]:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if Pcd.DatumType == TAB_UINT64 and not Value.endswith('ULL'):
+ Value += 'ULL'
+ elif Pcd.DatumType != TAB_UINT64 and not Value.endswith('U'):
+ Value += 'U'
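+
+        # Normalization sketch (inputs illustrative): "TRUE" -> "1U", "0x10" as
+        # UINT32 -> "0x10U", "0x10" as UINT64 -> "0x10ULL", and "010" -> "10U"
+        # (leading zeros are stripped so that int(Value, 0) accepts the literal
+        # and C does not misread it as octal).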
+
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if not Pcd.MaxDatumSize:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+
+ ArraySize = int(Pcd.MaxDatumSize, 0)
+ if Value[0] == '{':
+ Type = '(VOID *)'
+ ValueSize = len(Value.split(','))
+ else:
+ if Value[0] == 'L':
+ Unicode = True
+ Value = Value.lstrip('L') #.strip('"')
+ Value = eval(Value) # translate escape character
+ ValueSize = len(Value) + 1
+ NewValue = '{'
+ for Index in range(0, len(Value)):
+ if Unicode:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', '
+ else:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
+ if Unicode:
+ ArraySize = ArraySize // 2
+ Value = NewValue + '0 }'
+ if ArraySize < ValueSize:
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ else:
+ ArraySize = Pcd.GetPcdSize()
+ if Unicode:
+ ArraySize = ArraySize // 2
+ Array = '[%d]' % ArraySize
+ #
+ # skip casting for fixed at build since it breaks ARM assembly.
+ # Long term we need PCD macros that work in assembly
+ #
+ elif Pcd.Type != TAB_PCDS_FIXED_AT_BUILD and Pcd.DatumType in TAB_PCD_NUMERIC_TYPES_VOID:
+ Value = "((%s)%s)" % (Pcd.DatumType, Value)
+
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PcdValueName = '_PCD_PATCHABLE_VALUE_' + TokenCName
+ else:
+ PcdValueName = '_PCD_VALUE_' + TokenCName
+
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ #
+        # For Unicode, a UINT16 array will be generated, so the alignment of the Unicode string is guaranteed.
+ #
+ AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName))
+ if Unicode:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT16 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
+ AutoGenH.Append('extern %s UINT16 %s%s;\n' %(Const, PcdVariableName, Array))
+ else:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
+ AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
+
+ PcdDataSize = Pcd.GetPcdSize()
+ if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (FixedPcdSizeVariableName, PcdDataSize))
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, Pcd.MaxDatumSize))
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
+ AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (PatchPcdMaxSizeVariable, Pcd.MaxDatumSize))
+ elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
+ AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
+ AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
+
+ PcdDataSize = Pcd.GetPcdSize()
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
+
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
+ AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
+ else:
+ PcdDataSize = Pcd.GetPcdSize()
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
+
+ AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
+ AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
+
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
+ else:
+ AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
+ AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS) \n' % (SetModeStatusName, PcdVariableName))
+ else:
+ AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
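+
+# Illustrative generated output (PCD name, token number and value hypothetical):
+# for a FixedAtBuild UINT32 PCD PcdBar with value 0x100, the function above
+# emits roughly:
+#
+#   #define _PCD_TOKEN_PcdBar  0U
+#   #define _PCD_SIZE_PcdBar  4
+#   #define _PCD_GET_MODE_SIZE_PcdBar  _PCD_SIZE_PcdBar
+#   #define _PCD_VALUE_PcdBar  0x100U
+#   GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPcd_FixedAtBuild_PcdBar = _PCD_VALUE_PcdBar;
+#   #define _PCD_GET_MODE_32_PcdBar  _gPcd_FixedAtBuild_PcdBar
+#   //#define _PCD_SET_MODE_32_PcdBar  ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD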
+
+## Create code for library module PCDs
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param Pcd The PCD object
+#
+def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
+ PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
+ TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (TokenCName, TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ PcdTokenName = '_PCD_TOKEN_' + TokenCName
+ FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
+ PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
+ PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
+ PatchPcdMaxSizeVariable = '_gPcd_BinaryPatch_MaxSize_' + TokenCName
+ FixedPcdSizeVariableName = '_gPcd_FixedAtBuild_Size_' + TokenCName
+
+ if Pcd.PcdValueFromComm:
+ Pcd.DefaultValue = Pcd.PcdValueFromComm
+ elif Pcd.PcdValueFromFdf:
+ Pcd.DefaultValue = Pcd.PcdValueFromFdf
+ #
+ # Write PCDs
+ #
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ TokenNumber = int(Pcd.TokenValue, 0)
+ else:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
+            # If one of the source-built modules listed in the DSC is not listed in the FDF
+            # modules, and its INF lists a PCD that can only use the PcdsDynamic access method
+            # (it is only listed in the DEC file that declares the PCD as PcdsDynamic), then
+            # the build tool reports a warning to notify the platform integrator that they are
+            # attempting to build a module that must be included in a flash image in order to
+            # be functional. Such Dynamic PCDs are not added to the database unless they are
+            # used by other modules that are included in the FDF file.
+            # In this case, just assign an invalid token number so the build can pass.
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ TokenNumber = 0
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ else:
+ TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
+
+ if Pcd.Type not in gItemTypeStringDatabase:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+
+ DatumType = Pcd.DatumType
+ DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabase else gDatumSizeStringDatabase[TAB_VOID]
+ DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabaseLib else gDatumSizeStringDatabaseLib[TAB_VOID]
+ GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
+ GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
+
+ Type = ''
+ Array = ''
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if Pcd.DefaultValue[0]== '{':
+ Type = '(VOID *)'
+ Array = '[]'
+ PcdItemType = Pcd.Type
+ if PcdItemType in PCD_DYNAMIC_EX_TYPE_SET:
+ PcdExTokenName = '_PCD_TOKEN_' + TokenSpaceGuidCName + '_' + TokenCName
+ AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
+
+ if Info.IsLibrary:
+ PcdList = Info.LibraryPcdList
+ else:
+ PcdList = Info.ModulePcdList
+ PcdExCNameTest = 0
+ for PcdModule in PcdList:
+ if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
+ PcdExCNameTest += 1
+ # bail out early once we have found more than one
+ if PcdExCNameTest > 1:
+ break
+ # Be compatible with the current code, which uses PcdGet/Set for DynamicEx PCDs.
+ # If only PcdGet/Set is used and all such PCDs have different CNames, the build should succeed.
+ # If PcdGet/Set is used on PCDs that have different Guids but the same CName, the build should fail.
+ if PcdExCNameTest > 1:
+ AutoGenH.Append('// The macros below are disabled, as PcdToken and PcdGet/Set are not allowed when more than one DynamicEx PCD has the same CName but different Guids.\n')
+ AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define _PCD_TOKEN_%s %dU\n' % (TokenCName, TokenNumber))
+ if PcdItemType in PCD_DYNAMIC_TYPE_SET:
+ PcdList = []
+ PcdCNameList = []
+ PcdList.extend(Info.LibraryPcdList)
+ PcdList.extend(Info.ModulePcdList)
+ for PcdModule in PcdList:
+ if PcdModule.Type in PCD_DYNAMIC_TYPE_SET:
+ PcdCNameList.append(PcdModule.TokenCName)
+ if PcdCNameList.count(Pcd.TokenCName) > 1:
+ EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic Pcds [%s] are different Guids but same CName.They need to be changed to DynamicEx type to avoid the confliction.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
+ else:
+ AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
+ if DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ if PcdItemType == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[TAB_PCDS_PATCHABLE_IN_MODULE] + '_' + TokenCName
+ if DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if DatumType == TAB_VOID and Array == '[]':
+ DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
+ else:
+ DatumType = TAB_UINT8
+ AutoGenH.Append('extern %s _gPcd_BinaryPatch_%s%s;\n' %(DatumType, TokenCName, Array))
+ else:
+ AutoGenH.Append('extern volatile %s %s%s;\n' % (DatumType, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName))
+ PcdDataSize = Pcd.GetPcdSize()
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, TokenCName, PatchPcdSizeVariableName, PatchPcdMaxSizeVariable))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, TokenCName, PatchPcdSizeVariableName, PatchPcdMaxSizeVariable))
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PatchPcdMaxSizeVariable))
+ AutoGenH.Append('extern const UINTN %s; \n' % PatchPcdMaxSizeVariable)
+ else:
+ AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
+ AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS)\n' % (SetModeStatusName, PcdVariableName))
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
+
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, PatchPcdSizeVariableName))
+ AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
+
+ if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
+ key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
+ if DatumType == TAB_VOID and Array == '[]':
+ DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
+ if DatumType not in TAB_PCD_NUMERIC_TYPES_VOID:
+ DatumType = TAB_UINT8
+ AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
+ AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
+ AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
+
+ ConstFixedPcd = False
+ if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and (key in Info.ConstPcd or (Info.IsLibrary and not Info.ReferenceModules)):
+ ConstFixedPcd = True
+ if key in Info.ConstPcd:
+ Pcd.DefaultValue = Info.ConstPcd[key]
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define _PCD_VALUE_%s %s%s\n' %(TokenCName, Type, PcdVariableName))
+ else:
+ AutoGenH.Append('#define _PCD_VALUE_%s %s\n' %(TokenCName, Pcd.DefaultValue))
+ PcdDataSize = Pcd.GetPcdSize()
+ if PcdItemType == TAB_PCDS_FIXED_AT_BUILD:
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if ConstFixedPcd:
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
+ else:
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixedPcdSizeVariableName))
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, FixedPcdSizeVariableName))
+ AutoGenH.Append('extern const UINTN %s; \n' % FixedPcdSizeVariableName)
+ else:
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
+
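+# Illustrative sketch (hypothetical PCD): for a library that uses a
+# FIXED_AT_BUILD UINT32 PCD named PcdSample with token number 0, the function
+# above emits macros into AutoGen.h along the lines of:
+#
+#   #define _PCD_TOKEN_PcdSample  0U
+#   extern const UINT32 _gPcd_FixedAtBuild_PcdSample;
+#   #define _PCD_GET_MODE_32_PcdSample  _gPcd_FixedAtBuild_PcdSample
+#   //#define _PCD_SET_MODE_32_PcdSample  ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD
+#   #define _PCD_SIZE_PcdSample 4
+#   #define _PCD_GET_MODE_SIZE_PcdSample _PCD_SIZE_PcdSample
+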
+## Create code for library constructor
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
+ #
+ # Library Constructors
+ #
+ ConstructorPrototypeString = TemplateString()
+ ConstructorCallingString = TemplateString()
+ if Info.IsLibrary:
+ DependentLibraryList = [Info.Module]
+ else:
+ DependentLibraryList = Info.DependentLibraryList
+ for Lib in DependentLibraryList:
+ if len(Lib.ConstructorList) <= 0:
+ continue
+ Dict = {'Function':Lib.ConstructorList}
+ if Lib.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC]:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype[SUP_MODULE_BASE].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall[SUP_MODULE_BASE].Replace(Dict))
+ if Info.ModuleType not in [SUP_MODULE_BASE, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ if Lib.ModuleType in SUP_MODULE_SET_PEI:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))
+
+ if str(ConstructorPrototypeString) == '':
+ ConstructorPrototypeList = []
+ else:
+ ConstructorPrototypeList = [str(ConstructorPrototypeString)]
+ if str(ConstructorCallingString) == '':
+ ConstructorCallingList = []
+ else:
+ ConstructorCallingList = [str(ConstructorCallingString)]
+
+ Dict = {
+ 'Type' : 'Constructor',
+ 'FunctionPrototype' : ConstructorPrototypeList,
+ 'FunctionCall' : ConstructorCallingList
+ }
+ if Info.IsLibrary:
+ AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
+ else:
+ if Info.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
+ elif Info.ModuleType in SUP_MODULE_SET_PEI:
+ AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
+
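+# Illustrative sketch: for a DXE_DRIVER whose libraries declare constructors,
+# the function above expands gLibraryString['DXE'] into roughly the following
+# AutoGen.c code (FooLibConstructor is a hypothetical constructor name):
+#
+#   VOID
+#   EFIAPI
+#   ProcessLibraryConstructorList (
+#     IN EFI_HANDLE        ImageHandle,
+#     IN EFI_SYSTEM_TABLE  *SystemTable
+#     )
+#   {
+#     EFI_STATUS  Status;
+#     Status = FooLibConstructor (ImageHandle, SystemTable);
+#     ASSERT_EFI_ERROR (Status);
+#   }
+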
+## Create code for library destructor
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
+ #
+ # Library Destructors
+ #
+ DestructorPrototypeString = TemplateString()
+ DestructorCallingString = TemplateString()
+ if Info.IsLibrary:
+ DependentLibraryList = [Info.Module]
+ else:
+ DependentLibraryList = Info.DependentLibraryList
+ for Index in range(len(DependentLibraryList)-1, -1, -1):
+ Lib = DependentLibraryList[Index]
+ if len(Lib.DestructorList) <= 0:
+ continue
+ Dict = {'Function':Lib.DestructorList}
+ if Lib.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC]:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype[SUP_MODULE_BASE].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall[SUP_MODULE_BASE].Replace(Dict))
+ if Info.ModuleType not in [SUP_MODULE_BASE, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ if Lib.ModuleType in SUP_MODULE_SET_PEI:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))
+
+ if str(DestructorPrototypeString) == '':
+ DestructorPrototypeList = []
+ else:
+ DestructorPrototypeList = [str(DestructorPrototypeString)]
+ if str(DestructorCallingString) == '':
+ DestructorCallingList = []
+ else:
+ DestructorCallingList = [str(DestructorCallingString)]
+
+ Dict = {
+ 'Type' : 'Destructor',
+ 'FunctionPrototype' : DestructorPrototypeList,
+ 'FunctionCall' : DestructorCallingList
+ }
+ if Info.IsLibrary:
+ AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
+ else:
+ if Info.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
+ elif Info.ModuleType in SUP_MODULE_SET_PEI:
+ AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
+
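+# Illustrative sketch: the generated counterpart to the constructor list is
+# ProcessLibraryDestructorList, with the calls emitted in reverse dependency
+# order (hence the reversed loop above), e.g. for a hypothetical FooLib:
+#
+#   Status = FooLibDestructor (ImageHandle, SystemTable);
+#   ASSERT_EFI_ERROR (Status);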
+
+## Create code for ModuleEntryPoint
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
+ if Info.IsLibrary or Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_SEC]:
+ return
+ #
+ # Module Entry Points
+ #
+ NumEntryPoints = len(Info.Module.ModuleEntryPointList)
+ if 'PI_SPECIFICATION_VERSION' in Info.Module.Specification:
+ PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION']
+ else:
+ PiSpecVersion = '0x00000000'
+ if 'UEFI_SPECIFICATION_VERSION' in Info.Module.Specification:
+ UefiSpecVersion = Info.Module.Specification['UEFI_SPECIFICATION_VERSION']
+ else:
+ UefiSpecVersion = '0x00000000'
+ Dict = {
+ 'Function' : Info.Module.ModuleEntryPointList,
+ 'PiSpecVersion' : PiSpecVersion + 'U',
+ 'UefiSpecVersion': UefiSpecVersion + 'U'
+ }
+
+ if Info.ModuleType in [SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE]:
+ if Info.SourceFileList:
+ if NumEntryPoints != 1:
+ EdkLogger.error(
+ "build",
+ AUTOGEN_ERROR,
+ '%s must have exactly one entry point' % Info.ModuleType,
+ File=str(Info),
+ ExtraData= ", ".join(Info.Module.ModuleEntryPointList)
+ )
+ if Info.ModuleType == SUP_MODULE_PEI_CORE:
+ AutoGenC.Append(gPeiCoreEntryPointString.Replace(Dict))
+ AutoGenH.Append(gPeiCoreEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_DXE_CORE:
+ AutoGenC.Append(gDxeCoreEntryPointString.Replace(Dict))
+ AutoGenH.Append(gDxeCoreEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_SMM_CORE:
+ AutoGenC.Append(gSmmCoreEntryPointString.Replace(Dict))
+ AutoGenH.Append(gSmmCoreEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
+ AutoGenC.Append(gMmCoreStandaloneEntryPointString.Replace(Dict))
+ AutoGenH.Append(gMmCoreStandaloneEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_PEIM:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gPeimEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gUefiDriverEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gUefiDriverEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
+ if NumEntryPoints == 0:
+ AutoGenC.Append(gDxeSmmEntryPointString[0].Replace(Dict))
+ else:
+ AutoGenC.Append(gDxeSmmEntryPointString[1].Replace(Dict))
+ AutoGenH.Append(gDxeSmmEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_MM_STANDALONE:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gMmStandaloneEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gMmStandaloneEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gMmStandaloneEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_UEFI_APPLICATION:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gUefiApplicationEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gUefiApplicationEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gUefiApplicationEntryPointPrototype.Replace(Dict))
+
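+# Illustrative sketch: for a UEFI_DRIVER whose INF names a single entry point
+# DriverEntry (hypothetical), the generated AutoGen.c wrapper looks roughly
+# like:
+#
+#   EFI_STATUS
+#   EFIAPI
+#   ProcessModuleEntryPointList (
+#     IN EFI_HANDLE        ImageHandle,
+#     IN EFI_SYSTEM_TABLE  *SystemTable
+#     )
+#   {
+#     return DriverEntry (ImageHandle, SystemTable);
+#   }
+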
+## Create code for ModuleUnloadImage
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH):
+ if Info.IsLibrary or Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE, SUP_MODULE_SEC]:
+ return
+ #
+ # Unload Image Handlers
+ #
+ NumUnloadImage = len(Info.Module.ModuleUnloadImageList)
+ Dict = {'Count':str(NumUnloadImage) + 'U', 'Function':Info.Module.ModuleUnloadImageList}
+ if NumUnloadImage < 2:
+ AutoGenC.Append(gUefiUnloadImageString[NumUnloadImage].Replace(Dict))
+ else:
+ AutoGenC.Append(gUefiUnloadImageString[2].Replace(Dict))
+ AutoGenH.Append(gUefiUnloadImagePrototype.Replace(Dict))
+
+## Create code for GUID
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH):
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+
+ if Info.GuidList:
+ if not Info.IsLibrary:
+ AutoGenC.Append("\n// Guids\n")
+ AutoGenH.Append("\n// Guids\n")
+ #
+ # GUIDs
+ #
+ for Key in Info.GuidList:
+ if not Info.IsLibrary:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.GuidList[Key]))
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
+
+## Create code for protocol
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH):
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+
+ if Info.ProtocolList:
+ if not Info.IsLibrary:
+ AutoGenC.Append("\n// Protocols\n")
+ AutoGenH.Append("\n// Protocols\n")
+ #
+ # Protocol GUIDs
+ #
+ for Key in Info.ProtocolList:
+ if not Info.IsLibrary:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.ProtocolList[Key]))
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
+
+## Create code for PPI
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH):
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+
+ if Info.PpiList:
+ if not Info.IsLibrary:
+ AutoGenC.Append("\n// PPIs\n")
+ AutoGenH.Append("\n// PPIs\n")
+ #
+ # PPI GUIDs
+ #
+ for Key in Info.PpiList:
+ if not Info.IsLibrary:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.PpiList[Key]))
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
+
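+# Illustrative sketch (hypothetical GUID and value): for each GUID/Protocol/PPI
+# key collected above, non-library modules get a definition in AutoGen.c, and
+# every module gets an extern declaration in AutoGen.h:
+#
+#   // AutoGen.c
+#   GLOBAL_REMOVE_IF_UNREFERENCED EFI_GUID gSamplePpiGuid = { 0x12345678, 0x1234, 0x5678, { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08 }};
+#   // AutoGen.h
+#   extern EFI_GUID gSamplePpiGuid;
+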
+## Create code for PCD
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreatePcdCode(Info, AutoGenC, AutoGenH):
+
+ # Collect Token Space GUIDs used by DynamicEx PCDs
+ TokenSpaceList = []
+ for Pcd in Info.ModulePcdList:
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
+ TokenSpaceList.append(Pcd.TokenSpaceGuidCName)
+
+ SkuMgr = Info.PlatformInfo.Platform.SkuIdMgr
+ AutoGenH.Append("\n// Definition of SkuId Array\n")
+ AutoGenH.Append("extern UINT64 _gPcd_SkuId_Array[];\n")
+ # Add extern declarations to AutoGen.h if one or more Token Space GUIDs were found
+ if TokenSpaceList:
+ AutoGenH.Append("\n// Definition of PCD Token Space GUIDs used in this module\n\n")
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+ for Item in TokenSpaceList:
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Item))
+
+ if Info.IsLibrary:
+ if Info.ModulePcdList:
+ AutoGenH.Append("\n// PCD definitions\n")
+ for Pcd in Info.ModulePcdList:
+ CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd)
+ DynExPcdTokenNumberMapping (Info, AutoGenH)
+ else:
+ AutoGenC.Append("\n// Definition of SkuId Array\n")
+ AutoGenC.Append("GLOBAL_REMOVE_IF_UNREFERENCED UINT64 _gPcd_SkuId_Array[] = %s;\n" % SkuMgr.DumpSkuIdArrary())
+ if Info.ModulePcdList:
+ AutoGenH.Append("\n// Definition of PCDs used in this module\n")
+ AutoGenC.Append("\n// Definition of PCDs used in this module\n")
+ for Pcd in Info.ModulePcdList:
+ CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd)
+ DynExPcdTokenNumberMapping (Info, AutoGenH)
+ if Info.LibraryPcdList:
+ AutoGenH.Append("\n// Definition of PCDs used in libraries is in AutoGen.c\n")
+ AutoGenC.Append("\n// Definition of PCDs used in libraries\n")
+ for Pcd in Info.LibraryPcdList:
+ CreateModulePcdCode(Info, AutoGenC, AutoGenC, Pcd)
+ CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH)
+
+## Create code for unicode string definition
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+# @param UniGenBinBuffer Buffer to store uni string package data
+#
+def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuffer):
+ WorkingDir = os.getcwd()
+ os.chdir(Info.WorkspaceDir)
+
+ IncList = [Info.MetaFile.Dir]
+ # Get all files under the [Sources] section in the INF file for an EDK II module
+ EDK2Module = True
+ SrcList = [F for F in Info.SourceFileList]
+
+ if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-c') > -1:
+ CompatibleMode = True
+ else:
+ CompatibleMode = False
+
+ #
+ # -s is a temporary option dedicated for building .UNI files with ISO 639-2 language codes of EDK Shell in EDK2
+ #
+ if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1:
+ if CompatibleMode:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "-c and -s build options should be used exclusively",
+ ExtraData="[%s]" % str(Info))
+ ShellMode = True
+ else:
+ ShellMode = False
+
+ # RFC 4646 language codes are used only for EDK II modules; ISO 639-2 codes are used for EDK modules
+ if EDK2Module:
+ FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.RFCLanguages]
+ else:
+ FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.ISOLanguages]
+ Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer, FilterInfo)
+ if CompatibleMode or UniGenCFlag:
+ AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n")
+ AutoGenC.Append(Code)
+ AutoGenC.Append("\n")
+ AutoGenH.Append("\n//\n//Unicode String ID\n//\n")
+ AutoGenH.Append(Header)
+ if CompatibleMode or UniGenCFlag:
+ AutoGenH.Append("\n#define STRING_ARRAY_NAME %sStrings\n" % Info.Name)
+ os.chdir(WorkingDir)
+
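+# Illustrative sketch (hypothetical string token): the header returned by
+# GetStringFiles assigns each .uni string a numeric ID, and, when UniGenCFlag
+# is set, AutoGen.c receives the packed string data under STRING_ARRAY_NAME:
+#
+#   #define STR_SAMPLE_HELLO  0x0002
+#   #define STRING_ARRAY_NAME MyModuleStrings
+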
+def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
+ if len(Info.IdfFileList) > 0:
+ ImageFiles = IdfFileClassObject(sorted (Info.IdfFileList))
+ if ImageFiles.ImageFilesDict:
+ Index = 1
+ PaletteIndex = 1
+ IncList = [Info.MetaFile.Dir]
+ SrcList = [F for F in Info.SourceFileList]
+ SkipList = ['.jpg', '.png', '.bmp', '.inf', '.idf']
+ FileList = GetFileList(SrcList, IncList, SkipList)
+ ValueStartPtr = 60
+ StringH.Append("\n//\n//Image ID\n//\n")
+ ImageInfoOffset = 0
+ PaletteInfoOffset = 0
+ ImageBuffer = pack('x')
+ PaletteBuffer = pack('x')
+ BufferStr = ''
+ PaletteStr = ''
+ FileDict = {}
+ for Idf in ImageFiles.ImageFilesDict:
+ if ImageFiles.ImageFilesDict[Idf]:
+ for FileObj in ImageFiles.ImageFilesDict[Idf]:
+ for sourcefile in Info.SourceFileList:
+ if FileObj.FileName == sourcefile.File:
+ if not sourcefile.Ext.upper() in ['.PNG', '.BMP', '.JPG']:
+ EdkLogger.error("build", AUTOGEN_ERROR, "The %s's postfix must be one of .bmp, .jpg, .png" % (FileObj.FileName), ExtraData="[%s]" % str(Info))
+ FileObj.File = sourcefile
+ break
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, "The %s in %s is not defined in the driver's [Sources] section" % (FileObj.FileName, Idf), ExtraData="[%s]" % str(Info))
+
+ for FileObj in ImageFiles.ImageFilesDict[Idf]:
+ ID = FileObj.ImageID
+ File = FileObj.File
+ try:
+ SearchImageID (FileObj, FileList)
+ if FileObj.Referenced:
+ if (ValueStartPtr - len(DEFINE_STR + ID)) <= 0:
+ Line = DEFINE_STR + ' ' + ID + ' ' + DecToHexStr(Index, 4) + '\n'
+ else:
+ Line = DEFINE_STR + ' ' + ID + ' ' * (ValueStartPtr - len(DEFINE_STR + ID)) + DecToHexStr(Index, 4) + '\n'
+
+ if File not in FileDict:
+ FileDict[File] = Index
+ else:
+ DuplicateBlock = pack('B', EFI_HII_IIBT_DUPLICATE)
+ DuplicateBlock += pack('H', FileDict[File])
+ ImageBuffer += DuplicateBlock
+ BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
+ TempBufferList = AscToHexList(DuplicateBlock)
+ BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
+ StringH.Append(Line)
+ Index += 1
+ continue
+
+ TmpFile = open(File.Path, 'rb')
+ Buffer = TmpFile.read()
+ TmpFile.close()
+ if File.Ext.upper() == '.PNG':
+ TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_PNG)
+ TempBuffer += pack('I', len(Buffer))
+ TempBuffer += Buffer
+ elif File.Ext.upper() == '.JPG':
+ ImageType, = struct.unpack('4s', Buffer[6:10])
+ if ImageType != b'JFIF':
+ EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
+ TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
+ TempBuffer += pack('I', len(Buffer))
+ TempBuffer += Buffer
+ elif File.Ext.upper() == '.BMP':
+ TempBuffer, TempPalette = BmpImageDecoder(File, Buffer, PaletteIndex, FileObj.TransParent)
+ if len(TempPalette) > 1:
+ PaletteIndex += 1
+ NewPalette = pack('H', len(TempPalette))
+ NewPalette += TempPalette
+ PaletteBuffer += NewPalette
+ PaletteStr = WriteLine(PaletteStr, '// %s: %s: %s' % (DecToHexStr(PaletteIndex - 1, 4), ID, DecToHexStr(PaletteIndex - 1, 4)))
+ TempPaletteList = AscToHexList(NewPalette)
+ PaletteStr = WriteLine(PaletteStr, CreateArrayItem(TempPaletteList, 16) + '\n')
+ ImageBuffer += TempBuffer
+ BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
+ TempBufferList = AscToHexList(TempBuffer)
+ BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
+
+ StringH.Append(Line)
+ Index += 1
+ except IOError:
+ EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=File.Path)
+
+ BufferStr = WriteLine(BufferStr, '// End of the Image Info')
+ BufferStr = WriteLine(BufferStr, CreateArrayItem(DecToHexList(EFI_HII_IIBT_END, 2)) + '\n')
+ ImageEnd = pack('B', EFI_HII_IIBT_END)
+ ImageBuffer += ImageEnd
+
+ if len(ImageBuffer) > 1:
+ ImageInfoOffset = 12
+ if len(PaletteBuffer) > 1:
+ PaletteInfoOffset = 12 + len(ImageBuffer) - 1 # -1 is for the first empty pad byte of ImageBuffer
+
+ IMAGE_PACKAGE_HDR = pack('=II', ImageInfoOffset, PaletteInfoOffset)
+ # PACKAGE_HEADER_Length = PACKAGE_HEADER + ImageInfoOffset + PaletteInfoOffset + ImageBuffer Length + PaletteCount + PaletteBuffer Length
+ if len(PaletteBuffer) > 1:
+ PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1 + 2 + len(PaletteBuffer) - 1
+ else:
+ PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1
+ if PaletteIndex > 1:
+ PALETTE_INFO_HEADER = pack('H', PaletteIndex - 1)
+ # EFI_HII_PACKAGE_HEADER length max value is 0xFFFFFF
+ Hex_Length = '%06X' % PACKAGE_HEADER_Length
+ if PACKAGE_HEADER_Length > 0xFFFFFF:
+ EdkLogger.error("build", AUTOGEN_ERROR, "The Length of EFI_HII_PACKAGE_HEADER exceed its maximum value", ExtraData="[%s]" % str(Info))
+ PACKAGE_HEADER = pack('=HBB', int('0x' + Hex_Length[2:], 16), int('0x' + Hex_Length[0:2], 16), EFI_HII_PACKAGE_IMAGES)
+
+ IdfGenBinBuffer.write(PACKAGE_HEADER)
+ IdfGenBinBuffer.write(IMAGE_PACKAGE_HDR)
+ if len(ImageBuffer) > 1 :
+ IdfGenBinBuffer.write(ImageBuffer[1:])
+ if PaletteIndex > 1:
+ IdfGenBinBuffer.write(PALETTE_INFO_HEADER)
+ if len(PaletteBuffer) > 1:
+ IdfGenBinBuffer.write(PaletteBuffer[1:])
+
+ if IdfGenCFlag:
+ TotalLength = EFI_HII_ARRAY_SIZE_LENGTH + PACKAGE_HEADER_Length
+ AutoGenC.Append("\n//\n//Image Pack Definition\n//\n")
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + Info.Module.BaseName + 'Images' + '[] = {\n')
+ AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
+ AllStr = WriteLine(AllStr, '// Image PACKAGE HEADER\n')
+ IMAGE_PACKAGE_HDR_List = AscToHexList(PACKAGE_HEADER)
+ IMAGE_PACKAGE_HDR_List += AscToHexList(IMAGE_PACKAGE_HDR)
+ AllStr = WriteLine(AllStr, CreateArrayItem(IMAGE_PACKAGE_HDR_List, 16) + '\n')
+ AllStr = WriteLine(AllStr, '// Image DATA\n')
+ if BufferStr:
+ AllStr = WriteLine(AllStr, BufferStr)
+ if PaletteStr:
+ AllStr = WriteLine(AllStr, '// Palette Header\n')
+ PALETTE_INFO_HEADER_List = AscToHexList(PALETTE_INFO_HEADER)
+ AllStr = WriteLine(AllStr, CreateArrayItem(PALETTE_INFO_HEADER_List, 16) + '\n')
+ AllStr = WriteLine(AllStr, '// Palette Data\n')
+ AllStr = WriteLine(AllStr, PaletteStr)
+ AllStr = WriteLine(AllStr, '};')
+ AutoGenC.Append(AllStr)
+ AutoGenC.Append("\n")
+ StringH.Append('\nextern unsigned char ' + Info.Module.BaseName + 'Images[];\n')
+ StringH.Append("\n#define IMAGE_ARRAY_NAME %sImages\n" % Info.Module.BaseName)
+
+# typedef struct _EFI_HII_IMAGE_PACKAGE_HDR {
+# EFI_HII_PACKAGE_HEADER Header; # Standard package header, where Header.Type = EFI_HII_PACKAGE_IMAGES
+# UINT32 ImageInfoOffset;
+# UINT32 PaletteInfoOffset;
+# } EFI_HII_IMAGE_PACKAGE_HDR;
+
+# typedef struct {
+# UINT32 Length:24;
+# UINT32 Type:8;
+# UINT8 Data[];
+# } EFI_HII_PACKAGE_HEADER;
+
+# typedef struct _EFI_HII_IMAGE_BLOCK {
+# UINT8 BlockType;
+# UINT8 BlockBody[];
+# } EFI_HII_IMAGE_BLOCK;
+
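+# Illustrative sketch of the Length:24/Type:8 packing performed in
+# CreateIdfFileCode above; the 24-bit Length and 8-bit Type share one UINT32,
+# so the code splits a six-hex-digit length (the value here is hypothetical):
+#
+#   Hex_Length = '%06X' % 0x000126                 # '000126'
+#   pack('=HBB',
+#        int('0x' + Hex_Length[2:], 16),           # low 16 bits of Length
+#        int('0x' + Hex_Length[0:2], 16),          # high 8 bits of Length
+#        EFI_HII_PACKAGE_IMAGES)                   # Type
+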
+def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
+ ImageType, = struct.unpack('2s', Buffer[0:2])
+ if ImageType!= b'BM': # BMP file type is 'BM'
+ EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
+ BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
+ BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
+ BmpHeader = BMP_IMAGE_HEADER._make(BMP_IMAGE_HEADER_STRUCT.unpack_from(Buffer[2:]))
+ #
+ # Compressed BMP files are not supported.
+ #
+ if BmpHeader.biCompression != 0:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The compressed BMP file %s is not supported." % File.Path)
+
+ # The Width and Height are of UINT16 type in the Image Package
+ if BmpHeader.biWidth > 0xFFFF:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The width of BMP file %s exceeds 0xFFFF." % File.Path)
+ if BmpHeader.biHeight > 0xFFFF:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The height of BMP file %s exceeds 0xFFFF." % File.Path)
+
+ PaletteBuffer = pack('x')
+ if BmpHeader.biBitCount == 1:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
+ ImageBuffer += pack('B', PaletteIndex)
+ Width = (BmpHeader.biWidth + 7)//8
+ if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
+ PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
+ elif BmpHeader.biBitCount == 4:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
+ ImageBuffer += pack('B', PaletteIndex)
+ Width = (BmpHeader.biWidth + 1)//2
+ if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
+ PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
+ elif BmpHeader.biBitCount == 8:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT)
+ ImageBuffer += pack('B', PaletteIndex)
+ Width = BmpHeader.biWidth
+ if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
+ PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
+ elif BmpHeader.biBitCount == 24:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT)
+ Width = BmpHeader.biWidth * 3
+ else:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "Only support the 1 bit, 4 bit, 8bit, 24 bit BMP files.", ExtraData="[%s]" % str(File.Path))
+
+ ImageBuffer += pack('H', BmpHeader.biWidth)
+ ImageBuffer += pack('H', BmpHeader.biHeight)
+ Start = BmpHeader.bfOffBits
+ End = BmpHeader.bfSize - 1
+ for Height in range(0, BmpHeader.biHeight):
+ if Width % 4 != 0:
+ Start = End + (Width % 4) - 4 - Width
+ else:
+ Start = End - Width
+ ImageBuffer += Buffer[Start + 1 : Start + Width + 1]
+ End = Start
+
+ # Handle the palette info: BMP uses 4 bytes for the R, G, B and Reserved values, while EFI_HII_RGB_PIXEL holds only R, G, B
+ if PaletteBuffer and len(PaletteBuffer) > 1:
+ PaletteTemp = pack('x')
+ for Index in range(0, len(PaletteBuffer)):
+ if Index % 4 == 3:
+ continue
+ PaletteTemp += PaletteBuffer[Index:Index+1]
+ PaletteBuffer = PaletteTemp[1:]
+ return ImageBuffer, PaletteBuffer
+
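+# Illustrative usage (a minimal sketch; the File argument is assumed to expose
+# .Path and .Ext as in the caller above, and Buffer holds the raw .bmp bytes):
+#
+#   ImageBlock, Palette = BmpImageDecoder(FileObj.File, Buffer, PaletteIndex, FileObj.TransParent)
+#
+# ImageBlock is a packed EFI_HII_IIBT_IMAGE_*BIT block; Palette is the packed
+# RGB palette (or a single pad byte when the image carries no palette).
+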
+## Create common code
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateHeaderCode(Info, AutoGenC, AutoGenH):
+ # file header
+ AutoGenH.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.h'}))
+ # header file Prologue
+ AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-', '_')}))
+ AutoGenH.Append(gAutoGenHCppPrologueString)
+
+ # header files includes
+ if Info.ModuleType in gModuleTypeHeaderFile:
+ AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0])
+ #
+ # If PcdLib appears in the [LibraryClasses] section, or a PCD section exists, add PcdLib.h,
+ # since a module that only uses FixedPcd does not need PcdLib in its [LibraryClasses] section.
+ #
+ if 'PcdLib' in Info.Module.LibraryClasses or Info.Module.Pcds:
+ AutoGenH.Append("#include <Library/PcdLib.h>\n")
+
+ AutoGenH.Append('\nextern GUID gEfiCallerIdGuid;')
+ AutoGenH.Append('\nextern GUID gEdkiiDscPlatformGuid;')
+ AutoGenH.Append('\nextern CHAR8 *gEfiCallerBaseName;\n\n')
+
+ if Info.IsLibrary:
+ return
+
+ AutoGenH.Append("#define EFI_CALLER_ID_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.Guid))
+ AutoGenH.Append("#define EDKII_DSC_PLATFORM_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.PlatformInfo.Guid))
+
+ if Info.IsLibrary:
+ return
+ # C file header
+ AutoGenC.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.c'}))
+ # C file header files includes
+ if Info.ModuleType in gModuleTypeHeaderFile:
+ for Inc in gModuleTypeHeaderFile[Info.ModuleType]:
+ AutoGenC.Append("#include <%s>\n" % Inc)
+ else:
+ AutoGenC.Append("#include <%s>\n" % gBasicHeaderFile)
+
+ #
+ # Publish the CallerId Guid
+ #
+ AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEfiCallerIdGuid = %s;\n' % GuidStringToGuidStructureString(Info.Guid))
+ AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEdkiiDscPlatformGuid = %s;\n' % GuidStringToGuidStructureString(Info.PlatformInfo.Guid))
+ AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED CHAR8 *gEfiCallerBaseName = "%s";\n' % Info.Name)
+
+## Create common code for header file
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateFooterCode(Info, AutoGenC, AutoGenH):
+ AutoGenH.Append(gAutoGenHEpilogueString)
+
+## Create code for a module
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param StringH The TemplateString object for header file
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+# @param UniGenBinBuffer Buffer to store uni string package data
+# @param StringIdf The TemplateString object for header file
+# @param IdfGenCFlag IdfString is generated into AutoGen C file when it is set to True
+# @param IdfGenBinBuffer Buffer to store Idf string package data
+#
+def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer, StringIdf, IdfGenCFlag, IdfGenBinBuffer):
+ CreateHeaderCode(Info, AutoGenC, AutoGenH)
+
+ CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH)
+ CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH)
+ CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH)
+ CreatePcdCode(Info, AutoGenC, AutoGenH)
+ CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH)
+ CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH)
+ CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH)
+ CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH)
+
+ if Info.UnicodeFileList:
+ FileName = "%sStrDefs.h" % Info.Name
+ StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
+ StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-', '_')}))
+ CreateUnicodeStringCode(Info, AutoGenC, StringH, UniGenCFlag, UniGenBinBuffer)
+
+ GuidMacros = []
+ for Guid in Info.Module.Guids:
+ if Guid in Info.Module.GetGuidsUsedByPcd():
+ continue
+ GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
+ for Guid, Value in list(Info.Module.Protocols.items()) + list(Info.Module.Ppis.items()):
+ GuidMacros.append('#define %s %s' % (Guid, Value))
+ # supports FixedAtBuild and FeaturePcd usage in VFR file
+ if Info.VfrFileList and Info.ModulePcdList:
+ GuidMacros.append('#define %s %s' % ('FixedPcdGetBool(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet8(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet16(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet32(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet64(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FeaturePcdGet(TokenName)', '_PCD_VALUE_##TokenName'))
+ for Pcd in Info.ModulePcdList:
+ if Pcd.Type in [TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_FEATURE_FLAG]:
+ TokenCName = Pcd.TokenCName
+ Value = Pcd.DefaultValue
+ if Pcd.DatumType == 'BOOLEAN':
+ BoolValue = Value.upper()
+ if BoolValue == 'TRUE':
+ Value = '1'
+ elif BoolValue == 'FALSE':
+ Value = '0'
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ GuidMacros.append('#define %s %s' % ('_PCD_VALUE_'+TokenCName, Value))
+
+ if Info.IdfFileList:
+ GuidMacros.append('#include "%sImgDefs.h"' % Info.Name)
+
+ if GuidMacros:
+ StringH.Append('\n#ifdef VFRCOMPILE\n%s\n#endif\n' % '\n'.join(GuidMacros))
+
+ StringH.Append("\n#endif\n")
+ AutoGenH.Append('#include "%s"\n' % FileName)
+
+ if Info.IdfFileList:
+ FileName = "%sImgDefs.h" % Info.Name
+ StringIdf.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
+ StringIdf.Append(gAutoGenHPrologueString.Replace({'File':'IMAGEDEFS', 'Guid':Info.Guid.replace('-', '_')}))
+ CreateIdfFileCode(Info, AutoGenC, StringIdf, IdfGenCFlag, IdfGenBinBuffer)
+
+ StringIdf.Append("\n#endif\n")
+ AutoGenH.Append('#include "%s"\n' % FileName)
+
+ CreateFooterCode(Info, AutoGenC, AutoGenH)
+
+## Create the code file
+#
+# @param FilePath The path of code file
+# @param Content The content of code file
+# @param IsBinaryFile The flag indicating if the file is binary file or not
+#
+# @retval True If file content is changed or file doesn't exist
+# @retval False If the file exists and the content is not changed
+#
+def Generate(FilePath, Content, IsBinaryFile):
+ return SaveFileOnChange(FilePath, Content, IsBinaryFile)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py
new file mode 100755
index 00000000..901250bb
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py
@@ -0,0 +1,464 @@
+## @file
+# This file is used to generate DEPEX file for module's dependency expression
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+## Import Modules
+#
+import sys
+import Common.LongFilePathOs as os
+import re
+import traceback
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from io import BytesIO
+from struct import pack
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange
+from Common.Misc import GuidStructureStringToGuidString
+from Common.Misc import GuidStructureByteArrayToGuidString
+from Common.Misc import GuidStringToGuidStructureString
+from Common import EdkLogger as EdkLogger
+from Common.BuildVersion import gBUILD_VERSION
+from Common.DataType import *
+
+## Regular expression for matching "DEPENDENCY_START ... DEPENDENCY_END"
+gStartClosePattern = re.compile(".*DEPENDENCY_START(.+)DEPENDENCY_END.*", re.S)
+
+## Mapping between module type and EFI phase
+gType2Phase = {
+ SUP_MODULE_BASE : None,
+ SUP_MODULE_SEC : "PEI",
+ SUP_MODULE_PEI_CORE : "PEI",
+ SUP_MODULE_PEIM : "PEI",
+ SUP_MODULE_DXE_CORE : "DXE",
+ SUP_MODULE_DXE_DRIVER : "DXE",
+ SUP_MODULE_DXE_SMM_DRIVER : "DXE",
+ SUP_MODULE_DXE_RUNTIME_DRIVER: "DXE",
+ SUP_MODULE_DXE_SAL_DRIVER : "DXE",
+ SUP_MODULE_UEFI_DRIVER : "DXE",
+ SUP_MODULE_UEFI_APPLICATION : "DXE",
+ SUP_MODULE_SMM_CORE : "DXE",
+ SUP_MODULE_MM_STANDALONE : "MM",
+ SUP_MODULE_MM_CORE_STANDALONE : "MM",
+}
+
+## Convert a dependency expression string into its EFI internal representation
+#
+# The DependencyExpression class parses a dependency expression string and
+# converts it into its binary form.
+#
+class DependencyExpression:
+
+ ArchProtocols = {
+ '665e3ff6-46cc-11d4-9a38-0090273fc14d', # 'gEfiBdsArchProtocolGuid'
+ '26baccb1-6f42-11d4-bce7-0080c73c8881', # 'gEfiCpuArchProtocolGuid'
+ '26baccb2-6f42-11d4-bce7-0080c73c8881', # 'gEfiMetronomeArchProtocolGuid'
+ '1da97072-bddc-4b30-99f1-72a0b56fff2a', # 'gEfiMonotonicCounterArchProtocolGuid'
+ '27cfac87-46cc-11d4-9a38-0090273fc14d', # 'gEfiRealTimeClockArchProtocolGuid'
+ '27cfac88-46cc-11d4-9a38-0090273fc14d', # 'gEfiResetArchProtocolGuid'
+ 'b7dfb4e1-052f-449f-87be-9818fc91b733', # 'gEfiRuntimeArchProtocolGuid'
+ 'a46423e3-4617-49f1-b9ff-d1bfa9115839', # 'gEfiSecurityArchProtocolGuid'
+ '26baccb3-6f42-11d4-bce7-0080c73c8881', # 'gEfiTimerArchProtocolGuid'
+ '6441f818-6362-4e44-b570-7dba31dd2453', # 'gEfiVariableWriteArchProtocolGuid'
+ '1e5668e2-8481-11d4-bcf1-0080c73c8881', # 'gEfiVariableArchProtocolGuid'
+ '665e3ff5-46cc-11d4-9a38-0090273fc14d' # 'gEfiWatchdogTimerArchProtocolGuid'
+ }
+
+ OpcodePriority = {
+ DEPEX_OPCODE_AND : 1,
+ DEPEX_OPCODE_OR : 1,
+ DEPEX_OPCODE_NOT : 2,
+ }
+
+ Opcode = {
+ "PEI" : {
+ DEPEX_OPCODE_PUSH : 0x02,
+ DEPEX_OPCODE_AND : 0x03,
+ DEPEX_OPCODE_OR : 0x04,
+ DEPEX_OPCODE_NOT : 0x05,
+ DEPEX_OPCODE_TRUE : 0x06,
+ DEPEX_OPCODE_FALSE : 0x07,
+ DEPEX_OPCODE_END : 0x08
+ },
+
+ "DXE" : {
+ DEPEX_OPCODE_BEFORE: 0x00,
+ DEPEX_OPCODE_AFTER : 0x01,
+ DEPEX_OPCODE_PUSH : 0x02,
+ DEPEX_OPCODE_AND : 0x03,
+ DEPEX_OPCODE_OR : 0x04,
+ DEPEX_OPCODE_NOT : 0x05,
+ DEPEX_OPCODE_TRUE : 0x06,
+ DEPEX_OPCODE_FALSE : 0x07,
+ DEPEX_OPCODE_END : 0x08,
+ DEPEX_OPCODE_SOR : 0x09
+ },
+
+ "MM" : {
+ DEPEX_OPCODE_BEFORE: 0x00,
+ DEPEX_OPCODE_AFTER : 0x01,
+ DEPEX_OPCODE_PUSH : 0x02,
+ DEPEX_OPCODE_AND : 0x03,
+ DEPEX_OPCODE_OR : 0x04,
+ DEPEX_OPCODE_NOT : 0x05,
+ DEPEX_OPCODE_TRUE : 0x06,
+ DEPEX_OPCODE_FALSE : 0x07,
+ DEPEX_OPCODE_END : 0x08,
+ DEPEX_OPCODE_SOR : 0x09
+ }
+ }
+
+ # all supported op codes and operands
+ SupportedOpcode = [DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER, DEPEX_OPCODE_PUSH, DEPEX_OPCODE_AND, DEPEX_OPCODE_OR, DEPEX_OPCODE_NOT, DEPEX_OPCODE_END, DEPEX_OPCODE_SOR]
+ SupportedOperand = [DEPEX_OPCODE_TRUE, DEPEX_OPCODE_FALSE]
+
+ OpcodeWithSingleOperand = [DEPEX_OPCODE_NOT, DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
+ OpcodeWithTwoOperand = [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR]
+
+ # op codes that must not be the last one
+ NonEndingOpcode = [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR, DEPEX_OPCODE_NOT, DEPEX_OPCODE_SOR]
+ # op codes that must not be present at the same time
+ ExclusiveOpcode = [DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
+ # op codes that must come first if present
+ AboveAllOpcode = [DEPEX_OPCODE_SOR, DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
+
+ #
+ # opening and closing parentheses must be taken as individual tokens
+ #
+ TokenPattern = re.compile(r"(\(|\)|\{[^{}]+\{?[^{}]+\}?[ ]*\}|\w+)")
+
+ ## Constructor
+ #
+ # @param Expression The list or string of dependency expression
+ # @param ModuleType The type of the module using the dependency expression
+ #
+ def __init__(self, Expression, ModuleType, Optimize=False):
+ self.ModuleType = ModuleType
+ self.Phase = gType2Phase[ModuleType]
+ if isinstance(Expression, type([])):
+ self.ExpressionString = " ".join(Expression)
+ self.TokenList = Expression
+ else:
+ self.ExpressionString = Expression
+ self.GetExpressionTokenList()
+
+ self.PostfixNotation = []
+ self.OpcodeList = []
+
+ self.GetPostfixNotation()
+ self.ValidateOpcode()
+
+ EdkLogger.debug(EdkLogger.DEBUG_8, repr(self))
+ if Optimize:
+ self.Optimize()
+ EdkLogger.debug(EdkLogger.DEBUG_8, "\n Optimized: " + repr(self))
+
+ def __str__(self):
+ return " ".join(self.TokenList)
+
+ def __repr__(self):
+ WellForm = ''
+ for Token in self.PostfixNotation:
+ if Token in self.SupportedOpcode:
+ WellForm += "\n " + Token
+ else:
+ WellForm += ' ' + Token
+ return WellForm
+
+ ## Split the expression string into token list
+ def GetExpressionTokenList(self):
+ self.TokenList = self.TokenPattern.findall(self.ExpressionString)
+
+ ## Convert token list into postfix notation
+ def GetPostfixNotation(self):
+ Stack = []
+ LastToken = ''
+ for Token in self.TokenList:
+ if Token == "(":
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before open parentheses",
+ ExtraData="Near %s" % LastToken)
+ Stack.append(Token)
+ elif Token == ")":
+ if '(' not in Stack:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
+ ExtraData=str(self))
+ elif LastToken in self.SupportedOpcode + ['', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before close parentheses",
+ ExtraData="Near %s" % LastToken)
+ while len(Stack) > 0:
+ if Stack[-1] == '(':
+ Stack.pop()
+ break
+ self.PostfixNotation.append(Stack.pop())
+ elif Token in self.OpcodePriority:
+ if Token == DEPEX_OPCODE_NOT:
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before NOT",
+ ExtraData="Near %s" % LastToken)
+ elif LastToken in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before " + Token,
+ ExtraData="Near %s" % LastToken)
+
+ while len(Stack) > 0:
+ if Stack[-1] == "(" or self.OpcodePriority[Token] >= self.OpcodePriority[Stack[-1]]:
+ break
+ self.PostfixNotation.append(Stack.pop())
+ Stack.append(Token)
+ self.OpcodeList.append(Token)
+ else:
+ if Token not in self.SupportedOpcode:
+ # not OP, take it as GUID
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before %s" % Token,
+ ExtraData="Near %s" % LastToken)
+ if len(self.OpcodeList) == 0 or self.OpcodeList[-1] not in self.ExclusiveOpcode:
+ if Token not in self.SupportedOperand:
+ self.PostfixNotation.append(DEPEX_OPCODE_PUSH)
+ # check if OP is valid in this phase
+ elif Token in self.Opcode[self.Phase]:
+ if Token == DEPEX_OPCODE_END:
+ break
+ self.OpcodeList.append(Token)
+ else:
+ EdkLogger.error("GenDepex", PARSER_ERROR,
+ "Opcode=%s doesn't supported in %s stage " % (Token, self.Phase),
+ ExtraData=str(self))
+ self.PostfixNotation.append(Token)
+ LastToken = Token
+
+ # there should not be parentheses in Stack
+ if '(' in Stack or ')' in Stack:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
+ ExtraData=str(self))
+ while len(Stack) > 0:
+ self.PostfixNotation.append(Stack.pop())
+ if self.PostfixNotation[-1] != DEPEX_OPCODE_END:
+ self.PostfixNotation.append(DEPEX_OPCODE_END)
+
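+ # Worked example (illustrative): the infix token list
+ #     A AND (B OR NOT C)
+ # where A, B, C stand for GUID operands, converts to the postfix list
+ #     [PUSH, A, PUSH, B, PUSH, C, NOT, OR, AND, END]
+ # which is the evaluation order used by the dispatcher's stack machine.
+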
+ ## Validate the dependency expression
+ def ValidateOpcode(self):
+ for Op in self.AboveAllOpcode:
+ if Op in self.PostfixNotation:
+ if Op != self.PostfixNotation[0]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the first opcode in the expression" % Op,
+ ExtraData=str(self))
+ if len(self.PostfixNotation) < 3:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
+ ExtraData=str(self))
+ for Op in self.ExclusiveOpcode:
+ if Op in self.OpcodeList:
+ if len(self.OpcodeList) > 1:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the only opcode in the expression" % Op,
+ ExtraData=str(self))
+ if len(self.PostfixNotation) < 3:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
+ ExtraData=str(self))
+ if self.TokenList[-1] != DEPEX_OPCODE_END and self.TokenList[-1] in self.NonEndingOpcode:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-1],
+ ExtraData=str(self))
+ if self.TokenList[-1] == DEPEX_OPCODE_END and self.TokenList[-2] in self.NonEndingOpcode:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-2],
+ ExtraData=str(self))
+ if DEPEX_OPCODE_END in self.TokenList and DEPEX_OPCODE_END != self.TokenList[-1]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra expressions after END",
+ ExtraData=str(self))
+
+ ## Simply optimize the dependency expression by removing duplicated operands
+ def Optimize(self):
+ OpcodeSet = set(self.OpcodeList)
+ # if there isn't exactly one opcode in the set, return
+ if len(OpcodeSet) != 1:
+ return
+ Op = OpcodeSet.pop()
+ # if Op is neither OR nor AND, return
+ if Op not in [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR]:
+ return
+ NewOperand = []
+ AllOperand = set()
+ for Token in self.PostfixNotation:
+ if Token in self.SupportedOpcode or Token in NewOperand:
+ continue
+ AllOperand.add(Token)
+ if Token == DEPEX_OPCODE_TRUE:
+ if Op == DEPEX_OPCODE_AND:
+ continue
+ else:
+ NewOperand.append(Token)
+ break
+ elif Token == DEPEX_OPCODE_FALSE:
+ if Op == DEPEX_OPCODE_OR:
+ continue
+ else:
+ NewOperand.append(Token)
+ break
+ NewOperand.append(Token)
+
+ # don't generate depex if only TRUE operand left
+ if self.ModuleType == SUP_MODULE_PEIM and len(NewOperand) == 1 and NewOperand[0] == DEPEX_OPCODE_TRUE:
+ self.PostfixNotation = []
+ return
+
+ # don't generate depex if all operands are architecture protocols
+ if self.ModuleType in [SUP_MODULE_UEFI_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_MM_STANDALONE] and \
+ Op == DEPEX_OPCODE_AND and \
+ self.ArchProtocols == set(GuidStructureStringToGuidString(Guid) for Guid in AllOperand):
+ self.PostfixNotation = []
+ return
+
+ if len(NewOperand) == 0:
+ self.TokenList = list(AllOperand)
+ else:
+ self.TokenList = []
+ while True:
+ self.TokenList.append(NewOperand.pop(0))
+ if NewOperand == []:
+ break
+ self.TokenList.append(Op)
+ self.PostfixNotation = []
+ self.GetPostfixNotation()
+
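+ # Worked example (illustrative): with a single repeated operand, the
+ # expression "A AND A AND TRUE" reduces to just A: duplicates are dropped,
+ # and TRUE is the identity of AND, so the regenerated postfix notation
+ # becomes [PUSH, A, END].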
+
+ ## Convert a GUID value in C structure format into its binary form
+ #
+ # @param Guid The GUID value in C structure format
+ #
+ # @retval array The byte array representing the GUID value
+ #
+ def GetGuidValue(self, Guid):
+ GuidValueString = Guid.replace("{", "").replace("}", "").replace(" ", "")
+ GuidValueList = GuidValueString.split(",")
+ if len(GuidValueList) == 16:
+ GuidValueString = GuidStringToGuidStructureString(GuidStructureByteArrayToGuidString(Guid))
+ GuidValueString = GuidValueString.replace("{", "").replace("}", "").replace(" ", "")
+ GuidValueList = GuidValueString.split(",")
+ if len(GuidValueList) != 11:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid GUID value string or opcode: %s" % Guid)
+ return pack("1I2H8B", *(int(value, 16) for value in GuidValueList))
+
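+ # Illustrative sketch: a GUID operand in C-structure form such as
+ #   { 0x26baccb1, 0x6f42, 0x11d4, { 0xbc, 0xe7, 0x0, 0x80, 0xc7, 0x3c, 0x88, 0x81 }}
+ # flattens to 11 comma-separated values, which pack("1I2H8B", ...) turns
+ # into the 16-byte binary GUID expected by the dispatcher.
+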
+    ## Save the binary form of the dependency expression in a file
+    #
+    #   @param  File    The path of the file. If None is given, the data is written to stdout
+    #
+    #   @retval True    If the file doesn't exist or its content has changed
+    #   @retval False   If the file exists and its content is unchanged
+ #
+ def Generate(self, File=None):
+ Buffer = BytesIO()
+ if len(self.PostfixNotation) == 0:
+ return False
+
+ for Item in self.PostfixNotation:
+ if Item in self.Opcode[self.Phase]:
+ Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
+ elif Item in self.SupportedOpcode:
+ EdkLogger.error("GenDepex", FORMAT_INVALID,
+ "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
+ ExtraData=self.ExpressionString)
+ else:
+ Buffer.write(self.GetGuidValue(Item))
+
+ FilePath = ""
+ FileChangeFlag = True
+ if File is None:
+ sys.stdout.write(Buffer.getvalue())
+ FilePath = "STDOUT"
+ else:
+ FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)
+
+ Buffer.close()
+ return FileChangeFlag
+
+versionNumber = ("0.04" + " " + gBUILD_VERSION)
+__version__ = "%prog Version " + versionNumber
+__copyright__ = "Copyright (c) 2007-2018, Intel Corporation All rights reserved."
+__usage__ = "%prog [options] [dependency_expression_file]"
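+
+## A typical invocation, matching the usage line above (file names are illustrative):
+#
+#   GenDepex.py -t DXE_DRIVER -m -o Module.depex Module.dxs
+#
+# This parses Module.dxs, applies the simple optimization (-m) and writes the
+# binary DXE depex section to Module.depex.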
+
+## Parse command line options
+#
+#   @retval Options, Args   Parsed command-line options and the remaining arguments
+#
+def GetOptions():
+ from optparse import OptionParser
+
+ Parser = OptionParser(description=__copyright__, version=__version__, usage=__usage__)
+
+ Parser.add_option("-o", "--output", dest="OutputFile", default=None, metavar="FILE",
+ help="Specify the name of depex file to be generated")
+ Parser.add_option("-t", "--module-type", dest="ModuleType", default=None,
+ help="The type of module for which the dependency expression serves")
+ Parser.add_option("-e", "--dependency-expression", dest="Expression", default="",
+                      help="The string of dependency expression. If this option is present, the input file will be ignored.")
+ Parser.add_option("-m", "--optimize", dest="Optimize", default=False, action="store_true",
+ help="Do some simple optimization on the expression.")
+ Parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true",
+ help="build with verbose information")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-q", "--quiet", dest="quiet", default=False, action="store_true",
+ help="build with little information")
+
+ return Parser.parse_args()
+
+
+## Entrance method
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Option, Input = GetOptions()
+
+ # Set log level
+ if Option.quiet:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Option.verbose:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Option.debug is not None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ try:
+ if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
+ EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")
+
+ DxsFile = ''
+ if len(Input) > 0 and Option.Expression == "":
+ DxsFile = Input[0]
+ DxsString = open(DxsFile, 'r').read().replace("\n", " ").replace("\r", " ")
+ DxsString = gStartClosePattern.sub("\\1", DxsString)
+ elif Option.Expression != "":
+ if Option.Expression[0] == '"':
+ DxsString = Option.Expression[1:-1]
+ else:
+ DxsString = Option.Expression
+ else:
+ EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
+
+ Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
+ if Option.OutputFile is not None:
+ FileChangeFlag = Dpx.Generate(Option.OutputFile)
+ if not FileChangeFlag and DxsFile:
+ #
+ # Touch the output file if its time stamp is older than the original
+                # DXS file, to avoid re-invoking this tool for the dependency check in the build rule.
+ #
+                if os.stat(DxsFile).st_mtime > os.stat(Option.OutputFile).st_mtime:
+ os.utime(Option.OutputFile, None)
+ else:
+ Dpx.Generate()
+ except BaseException as X:
+ EdkLogger.quiet("")
+ if Option is not None and Option.debug is not None:
+ EdkLogger.quiet(traceback.format_exc())
+ else:
+ EdkLogger.quiet(str(X))
+ return 1
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(Main())
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py
new file mode 100755
index 00000000..130557de
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -0,0 +1,1810 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import sys
+import string
+import re
+import os.path as path
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common.BuildToolError import *
+from Common.Misc import *
+from Common.StringUtils import *
+from .BuildEngine import *
+import Common.GlobalData as GlobalData
+from collections import OrderedDict
+from Common.DataType import TAB_COMPILER_MSFT
+
+## Regular expression for finding header file inclusions
+gIncludePattern = re.compile(r"^[ \t]*[#%]?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
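+
+# For example (illustrative), the pattern captures the bare file name from both
+# C-style and NASM-style inclusion lines:
+#   gIncludePattern.findall('#include <Uefi.h>\n%include "mac.inc"\n')
+#   -> ['Uefi.h', 'mac.inc']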
+
+## Regular expression for matching macro used in header file inclusion
+gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
+
+gIsFileMap = {}
+
+## pattern for include style in Edk.x code
+gProtocolDefinition = "Protocol/%(HeaderKey)s/%(HeaderKey)s.h"
+gGuidDefinition = "Guid/%(HeaderKey)s/%(HeaderKey)s.h"
+gArchProtocolDefinition = "ArchProtocol/%(HeaderKey)s/%(HeaderKey)s.h"
+gPpiDefinition = "Ppi/%(HeaderKey)s/%(HeaderKey)s.h"
+gIncludeMacroConversion = {
+ "EFI_PROTOCOL_DEFINITION" : gProtocolDefinition,
+ "EFI_GUID_DEFINITION" : gGuidDefinition,
+ "EFI_ARCH_PROTOCOL_DEFINITION" : gArchProtocolDefinition,
+ "EFI_PROTOCOL_PRODUCER" : gProtocolDefinition,
+ "EFI_PROTOCOL_CONSUMER" : gProtocolDefinition,
+ "EFI_PROTOCOL_DEPENDENCY" : gProtocolDefinition,
+ "EFI_ARCH_PROTOCOL_PRODUCER" : gArchProtocolDefinition,
+ "EFI_ARCH_PROTOCOL_CONSUMER" : gArchProtocolDefinition,
+ "EFI_ARCH_PROTOCOL_DEPENDENCY" : gArchProtocolDefinition,
+ "EFI_PPI_DEFINITION" : gPpiDefinition,
+ "EFI_PPI_PRODUCER" : gPpiDefinition,
+ "EFI_PPI_CONSUMER" : gPpiDefinition,
+ "EFI_PPI_DEPENDENCY" : gPpiDefinition,
+}
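+
+# For example (illustrative): an Edk.x-style inclusion line
+#   #include EFI_PROTOCOL_DEFINITION (DiskIo)
+# is matched by gMacroPattern with HeaderKey 'DiskIo' and converted through the
+# table above into the path 'Protocol/DiskIo/DiskIo.h'.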
+
+NMAKE_FILETYPE = "nmake"
+GMAKE_FILETYPE = "gmake"
+WIN32_PLATFORM = "win32"
+POSIX_PLATFORM = "posix"
+
+## BuildFile class
+#
+# This base class encapsulates a build file and its generation. It uses a template to
+# generate the content of the build file, which is retrieved from AutoGen objects.
+#
+class BuildFile(object):
+ ## template used to generate the build file (i.e. makefile if using make)
+ _TEMPLATE_ = TemplateString('')
+
+ _DEFAULT_FILE_NAME_ = "Makefile"
+
+ ## default file name for each type of build file
+ _FILE_NAME_ = {
+ NMAKE_FILETYPE : "Makefile",
+ GMAKE_FILETYPE : "GNUmakefile"
+ }
+
+    ## Return the makefile name for the current make tool
+ def getMakefileName(self):
+ if not self._FileType:
+ return self._DEFAULT_FILE_NAME_
+ else:
+ return self._FILE_NAME_[self._FileType]
+
+ ## Fixed header string for makefile
+ _MAKEFILE_HEADER = '''#
+# DO NOT EDIT
+# This file is auto-generated by build utility
+#
+# Module Name:
+#
+# %s
+#
+# Abstract:
+#
+# Auto-generated makefile for building modules, libraries or platform
+#
+ '''
+
+ ## Header string for each type of build file
+ _FILE_HEADER_ = {
+ NMAKE_FILETYPE : _MAKEFILE_HEADER % _FILE_NAME_[NMAKE_FILETYPE],
+ GMAKE_FILETYPE : _MAKEFILE_HEADER % _FILE_NAME_[GMAKE_FILETYPE]
+ }
+
+ ## shell commands which can be used in build file in the form of macro
+ # $(CP) copy file command
+ # $(MV) move file command
+ # $(RM) remove file command
+ # $(MD) create dir command
+ # $(RD) remove dir command
+ #
+ _SHELL_CMD_ = {
+ WIN32_PLATFORM : {
+ "CP" : "copy /y",
+ "MV" : "move /y",
+ "RM" : "del /f /q",
+ "MD" : "mkdir",
+ "RD" : "rmdir /s /q",
+ },
+
+ POSIX_PLATFORM : {
+ "CP" : "cp -f",
+ "MV" : "mv -f",
+ "RM" : "rm -f",
+ "MD" : "mkdir -p",
+ "RD" : "rm -r -f",
+ }
+ }
+
+ ## directory separator
+ _SEP_ = {
+ WIN32_PLATFORM : "\\",
+ POSIX_PLATFORM : "/"
+ }
+
+ ## directory creation template
+ _MD_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if not exist %(dir)s $(MD) %(dir)s',
+ POSIX_PLATFORM : "$(MD) %(dir)s"
+ }
+
+ ## directory removal template
+ _RD_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(dir)s $(RD) %(dir)s',
+ POSIX_PLATFORM : "$(RD) %(dir)s"
+ }
+ ## cp if exist
+ _CP_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(Src)s $(CP) %(Src)s %(Dst)s',
+ POSIX_PLATFORM : "test -f %(Src)s && $(CP) %(Src)s %(Dst)s"
+ }
+
+ _CD_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(dir)s cd %(dir)s',
+ POSIX_PLATFORM : "test -e %(dir)s && cd %(dir)s"
+ }
+
+ _MAKE_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(file)s "$(MAKE)" $(MAKE_FLAGS) -f %(file)s',
+ POSIX_PLATFORM : 'test -e %(file)s && "$(MAKE)" $(MAKE_FLAGS) -f %(file)s'
+ }
+
+ _INCLUDE_CMD_ = {
+ NMAKE_FILETYPE : '!INCLUDE',
+ GMAKE_FILETYPE : "include"
+ }
+
+ _INC_FLAG_ = {TAB_COMPILER_MSFT : "/I", "GCC" : "-I", "INTEL" : "-I", "RVCT" : "-I", "NASM" : "-I"}
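+
+    # An illustrative expansion of the templates above: on win32,
+    #     self._MD_TEMPLATE_[WIN32_PLATFORM] % {'dir': '$(OUTPUT_DIR)'}
+    # yields
+    #     'if not exist $(OUTPUT_DIR) $(MD) $(OUTPUT_DIR)'
+    # where $(MD) is defined in the generated makefile as 'mkdir'.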
+
+ ## Constructor of BuildFile
+ #
+ # @param AutoGenObject Object of AutoGen class
+ #
+ def __init__(self, AutoGenObject):
+ self._AutoGenObject = AutoGenObject
+
+ MakePath = AutoGenObject.BuildOption.get('MAKE', {}).get('PATH')
+ if not MakePath:
+ self._FileType = ""
+ elif "nmake" in MakePath:
+ self._FileType = NMAKE_FILETYPE
+ else:
+            self._FileType = GMAKE_FILETYPE
+
+ if sys.platform == "win32":
+ self._Platform = WIN32_PLATFORM
+ else:
+ self._Platform = POSIX_PLATFORM
+
+ ## Create build file.
+ #
+ # Only nmake and gmake are supported.
+ #
+ # @retval TRUE The build file is created or re-created successfully.
+ # @retval FALSE The build file exists and is the same as the one to be generated.
+ #
+ def Generate(self):
+ FileContent = self._TEMPLATE_.Replace(self._TemplateDict)
+ FileName = self.getMakefileName()
+ if not os.path.exists(os.path.join(self._AutoGenObject.MakeFileDir, "deps.txt")):
+ with open(os.path.join(self._AutoGenObject.MakeFileDir, "deps.txt"),"w+") as fd:
+ fd.write("")
+ if not os.path.exists(os.path.join(self._AutoGenObject.MakeFileDir, "dependency")):
+ with open(os.path.join(self._AutoGenObject.MakeFileDir, "dependency"),"w+") as fd:
+ fd.write("")
+ if not os.path.exists(os.path.join(self._AutoGenObject.MakeFileDir, "deps_target")):
+ with open(os.path.join(self._AutoGenObject.MakeFileDir, "deps_target"),"w+") as fd:
+ fd.write("")
+ return SaveFileOnChange(os.path.join(self._AutoGenObject.MakeFileDir, FileName), FileContent, False)
+
+ ## Return a list of directory creation command string
+ #
+ # @param DirList The list of directory to be created
+ #
+ # @retval list The directory creation command list
+ #
+ def GetCreateDirectoryCommand(self, DirList):
+ return [self._MD_TEMPLATE_[self._Platform] % {'dir':Dir} for Dir in DirList]
+
+ ## Return a list of directory removal command string
+ #
+ # @param DirList The list of directory to be removed
+ #
+ # @retval list The directory removal command list
+ #
+ def GetRemoveDirectoryCommand(self, DirList):
+ return [self._RD_TEMPLATE_[self._Platform] % {'dir':Dir} for Dir in DirList]
+
+ def PlaceMacro(self, Path, MacroDefinitions=None):
+ if Path.startswith("$("):
+ return Path
+ else:
+ if MacroDefinitions is None:
+ MacroDefinitions = {}
+ PathLength = len(Path)
+ for MacroName in MacroDefinitions:
+ MacroValue = MacroDefinitions[MacroName]
+ MacroValueLength = len(MacroValue)
+ if MacroValueLength == 0:
+ continue
+ if MacroValueLength <= PathLength and Path.startswith(MacroValue):
+ Path = "$(%s)%s" % (MacroName, Path[MacroValueLength:])
+ break
+ return Path
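+
+    # For example (paths are illustrative):
+    #     PlaceMacro('/ws/Build/DEBUG_GCC5/X64/Module', {'BUILD_DIR': '/ws/Build/DEBUG_GCC5'})
+    # returns '$(BUILD_DIR)/X64/Module'; a path already starting with '$(' is
+    # returned unchanged so macros are never substituted twice.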
+
+## ModuleMakefile class
+#
+# This class encapsulates the makefile and its generation for a module. It uses a template
+# to generate the makefile content, which is retrieved from the ModuleAutoGen object.
+#
+class ModuleMakefile(BuildFile):
+ ## template used to generate the makefile for module
+ _TEMPLATE_ = TemplateString('''\
+${makefile_header}
+
+#
+# Platform Macro Definition
+#
+PLATFORM_NAME = ${platform_name}
+PLATFORM_GUID = ${platform_guid}
+PLATFORM_VERSION = ${platform_version}
+PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
+PLATFORM_DIR = ${platform_dir}
+PLATFORM_OUTPUT_DIR = ${platform_output_directory}
+
+#
+# Module Macro Definition
+#
+MODULE_NAME = ${module_name}
+MODULE_GUID = ${module_guid}
+MODULE_NAME_GUID = ${module_name_guid}
+MODULE_VERSION = ${module_version}
+MODULE_TYPE = ${module_type}
+MODULE_FILE = ${module_file}
+MODULE_FILE_BASE_NAME = ${module_file_base_name}
+BASE_NAME = $(MODULE_NAME)
+MODULE_RELATIVE_DIR = ${module_relative_directory}
+PACKAGE_RELATIVE_DIR = ${package_relative_directory}
+MODULE_DIR = ${module_dir}
+FFS_OUTPUT_DIR = ${ffs_output_directory}
+
+MODULE_ENTRY_POINT = ${module_entry_point}
+ARCH_ENTRY_POINT = ${arch_entry_point}
+IMAGE_ENTRY_POINT = ${image_entry_point}
+
+${BEGIN}${module_extra_defines}
+${END}
+#
+# Build Configuration Macro Definition
+#
+ARCH = ${architecture}
+TOOLCHAIN = ${toolchain_tag}
+TOOLCHAIN_TAG = ${toolchain_tag}
+TARGET = ${build_target}
+
+#
+# Build Directory Macro Definition
+#
+# PLATFORM_BUILD_DIR = ${platform_build_directory}
+BUILD_DIR = ${platform_build_directory}
+BIN_DIR = $(BUILD_DIR)${separator}${architecture}
+LIB_DIR = $(BIN_DIR)
+MODULE_BUILD_DIR = ${module_build_directory}
+OUTPUT_DIR = ${module_output_directory}
+DEBUG_DIR = ${module_debug_directory}
+DEST_DIR_OUTPUT = $(OUTPUT_DIR)
+DEST_DIR_DEBUG = $(DEBUG_DIR)
+
+#
+# Shell Command Macro
+#
+${BEGIN}${shell_command_code} = ${shell_command}
+${END}
+
+#
+# Tools definitions specific to this module
+#
+${BEGIN}${module_tool_definitions}
+${END}
+MAKE_FILE = ${makefile_path}
+
+#
+# Build Macro
+#
+${BEGIN}${file_macro}
+${END}
+
+#
+# Overridable Target Macro Definitions
+#
+FORCE_REBUILD = force_build
+INIT_TARGET = init
+PCH_TARGET =
+BC_TARGET = ${BEGIN}${backward_compatible_target} ${END}
+CODA_TARGET = ${BEGIN}${remaining_build_target} \\
+ ${END}
+
+#
+# Default target, which will build dependent libraries in addition to source files
+#
+
+all: mbuild
+
+
+#
+# Target used when called from platform makefile, which will bypass the build of dependent libraries
+#
+
+pbuild: $(INIT_TARGET) $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)
+
+#
+# ModuleTarget
+#
+
+mbuild: $(INIT_TARGET) $(BC_TARGET) gen_libs $(PCH_TARGET) $(CODA_TARGET)
+
+#
+# Build Target used in multi-thread build mode, which will bypass the init and gen_libs targets
+#
+
+tbuild: $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)
+
+#
+# Phony target which is used to force executing commands for a target
+#
+force_build:
+\t-@
+
+#
+# Target to update the FD
+#
+
+fds: mbuild gen_fds
+
+#
+# Initialization target: print build information and create necessary directories
+#
+init: info dirs
+
+info:
+\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
+
+dirs:
+${BEGIN}\t-@${create_directory_command}\n${END}
+
+strdefs:
+\t-@$(CP) $(DEBUG_DIR)${separator}AutoGen.h $(DEBUG_DIR)${separator}$(MODULE_NAME)StrDefs.h
+
+#
+# GenLibsTarget
+#
+gen_libs:
+\t${BEGIN}@"$(MAKE)" $(MAKE_FLAGS) -f ${dependent_library_build_directory}${separator}${makefile_name}
+\t${END}@cd $(MODULE_BUILD_DIR)
+
+#
+# Build Flash Device Image
+#
+gen_fds:
+\t@"$(MAKE)" $(MAKE_FLAGS) -f $(BUILD_DIR)${separator}${makefile_name} fds
+\t@cd $(MODULE_BUILD_DIR)
+
+${INCLUDETAG}
+
+#
+# Individual Object Build Targets
+#
+${BEGIN}${file_build_target}
+${END}
+
+#
+# clean all intermediate files
+#
+clean:
+\t${BEGIN}${clean_command}
+\t${END}\t$(RM) AutoGenTimeStamp
+
+#
+# clean all generated files
+#
+cleanall:
+${BEGIN}\t${cleanall_command}
+${END}\t$(RM) *.pdb *.idb > NUL 2>&1
+\t$(RM) $(BIN_DIR)${separator}$(MODULE_NAME).efi
+\t$(RM) AutoGenTimeStamp
+
+#
+# clean all dependent libraries built
+#
+cleanlib:
+\t${BEGIN}-@${library_build_command} cleanall
+\t${END}@cd $(MODULE_BUILD_DIR)\n\n''')
+
+ _FILE_MACRO_TEMPLATE = TemplateString("${macro_name} = ${BEGIN} \\\n ${source_file}${END}\n")
+ _BUILD_TARGET_TEMPLATE = TemplateString("${BEGIN}${target} : ${deps}\n${END}\t${cmd}\n")
+
+ ## Constructor of ModuleMakefile
+ #
+ # @param ModuleAutoGen Object of ModuleAutoGen class
+ #
+ def __init__(self, ModuleAutoGen):
+ BuildFile.__init__(self, ModuleAutoGen)
+ self.PlatformInfo = self._AutoGenObject.PlatformInfo
+
+ self.ResultFileList = []
+ self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
+
+ self.FileBuildTargetList = [] # [(src, target string)]
+ self.BuildTargetList = [] # [target string]
+ self.PendingBuildTargetList = [] # [FileBuildRule objects]
+ self.CommonFileDependency = []
+ self.FileListMacros = {}
+ self.ListFileMacros = {}
+ self.ObjTargetDict = OrderedDict()
+ self.FileCache = {}
+ self.LibraryBuildCommandList = []
+ self.LibraryFileList = []
+ self.LibraryMakefileList = []
+ self.LibraryBuildDirectoryList = []
+ self.SystemLibraryList = []
+ self.Macros = OrderedDict()
+ self.Macros["OUTPUT_DIR" ] = self._AutoGenObject.Macros["OUTPUT_DIR"]
+ self.Macros["DEBUG_DIR" ] = self._AutoGenObject.Macros["DEBUG_DIR"]
+ self.Macros["MODULE_BUILD_DIR"] = self._AutoGenObject.Macros["MODULE_BUILD_DIR"]
+ self.Macros["BIN_DIR" ] = self._AutoGenObject.Macros["BIN_DIR"]
+ self.Macros["BUILD_DIR" ] = self._AutoGenObject.Macros["BUILD_DIR"]
+ self.Macros["WORKSPACE" ] = self._AutoGenObject.Macros["WORKSPACE"]
+ self.Macros["FFS_OUTPUT_DIR" ] = self._AutoGenObject.Macros["FFS_OUTPUT_DIR"]
+ self.GenFfsList = ModuleAutoGen.GenFfsList
+ self.MacroList = ['FFS_OUTPUT_DIR', 'MODULE_GUID', 'OUTPUT_DIR']
+ self.FfsOutputFileList = []
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing information used to do replacement in template
+ @property
+ def _TemplateDict(self):
+ MyAgo = self._AutoGenObject
+ Separator = self._SEP_[self._Platform]
+
+        # break the build if neither source files nor binary files are found
+ if len(MyAgo.SourceFileList) == 0 and len(MyAgo.BinaryFileList) == 0:
+ EdkLogger.error("build", AUTOGEN_ERROR, "No files to be built in module [%s, %s, %s]"
+ % (MyAgo.BuildTarget, MyAgo.ToolChain, MyAgo.Arch),
+ ExtraData="[%s]" % str(MyAgo))
+
+ # convert dependent libraries to build command
+ self.ProcessDependentLibrary()
+ if len(MyAgo.Module.ModuleEntryPointList) > 0:
+ ModuleEntryPoint = MyAgo.Module.ModuleEntryPointList[0]
+ else:
+ ModuleEntryPoint = "_ModuleEntryPoint"
+
+ ArchEntryPoint = ModuleEntryPoint
+
+ if MyAgo.Arch == "EBC":
+ # EBC compiler always use "EfiStart" as entry point. Only applies to EdkII modules
+ ImageEntryPoint = "EfiStart"
+ else:
+ # EdkII modules always use "_ModuleEntryPoint" as entry point
+ ImageEntryPoint = "_ModuleEntryPoint"
+
+ for k, v in MyAgo.Module.Defines.items():
+ if k not in MyAgo.Macros:
+ MyAgo.Macros[k] = v
+
+ if 'MODULE_ENTRY_POINT' not in MyAgo.Macros:
+ MyAgo.Macros['MODULE_ENTRY_POINT'] = ModuleEntryPoint
+ if 'ARCH_ENTRY_POINT' not in MyAgo.Macros:
+ MyAgo.Macros['ARCH_ENTRY_POINT'] = ArchEntryPoint
+ if 'IMAGE_ENTRY_POINT' not in MyAgo.Macros:
+ MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
+
+ PCI_COMPRESS_Flag = False
+ for k, v in MyAgo.Module.Defines.items():
+ if 'PCI_COMPRESS' == k and 'TRUE' == v:
+ PCI_COMPRESS_Flag = True
+
+ # tools definitions
+ ToolsDef = []
+ IncPrefix = self._INC_FLAG_[MyAgo.ToolChainFamily]
+ for Tool in sorted(list(MyAgo.BuildOption)):
+ Appended = False
+ for Attr in sorted(list(MyAgo.BuildOption[Tool])):
+ Value = MyAgo.BuildOption[Tool][Attr]
+ if Attr == "FAMILY":
+ continue
+ elif Attr == "PATH":
+ ToolsDef.append("%s = %s" % (Tool, Value))
+ Appended = True
+ else:
+                # Don't generate MAKE_FLAGS in the makefile; it's passed via an environment variable.
+ if Tool == "MAKE":
+ continue
+ # Remove duplicated include path, if any
+ if Attr == "FLAGS":
+ Value = RemoveDupOption(Value, IncPrefix, MyAgo.IncludePathList)
+ if Tool == "OPTROM" and PCI_COMPRESS_Flag:
+ ValueList = Value.split()
+ if ValueList:
+ for i, v in enumerate(ValueList):
+ if '-e' == v:
+ ValueList[i] = '-ec'
+ Value = ' '.join(ValueList)
+
+ ToolsDef.append("%s_%s = %s" % (Tool, Attr, Value))
+ Appended = True
+ if Appended:
+ ToolsDef.append("")
+
+ # generate the Response file and Response flag
+ RespDict = self.CommandExceedLimit()
+ RespFileList = os.path.join(MyAgo.OutputDir, 'respfilelist.txt')
+ if RespDict:
+ RespFileListContent = ''
+ for Resp in RespDict:
+ RespFile = os.path.join(MyAgo.OutputDir, str(Resp).lower() + '.txt')
+ StrList = RespDict[Resp].split(' ')
+ UnexpandMacro = []
+ NewStr = []
+ for Str in StrList:
+ if '$' in Str or '-MMD' in Str or '-MF' in Str:
+ UnexpandMacro.append(Str)
+ else:
+ NewStr.append(Str)
+ UnexpandMacroStr = ' '.join(UnexpandMacro)
+ NewRespStr = ' '.join(NewStr)
+ SaveFileOnChange(RespFile, NewRespStr, False)
+ ToolsDef.append("%s = %s" % (Resp, UnexpandMacroStr + ' @' + RespFile))
+ RespFileListContent += '@' + RespFile + TAB_LINE_BREAK
+ RespFileListContent += NewRespStr + TAB_LINE_BREAK
+ SaveFileOnChange(RespFileList, RespFileListContent, False)
+ else:
+ if os.path.exists(RespFileList):
+ os.remove(RespFileList)
+
+ # convert source files and binary files to build targets
+ self.ResultFileList = [str(T.Target) for T in MyAgo.CodaTargetList]
+ if len(self.ResultFileList) == 0 and len(MyAgo.SourceFileList) != 0:
+ EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
+ ExtraData="[%s]" % str(MyAgo))
+
+ self.ProcessBuildTargetList(MyAgo.OutputDir, ToolsDef)
+ self.ParserGenerateFfsCmd()
+
+ # Generate macros used to represent input files
+ FileMacroList = [] # macro name = file list
+ for FileListMacro in self.FileListMacros:
+ FileMacro = self._FILE_MACRO_TEMPLATE.Replace(
+ {
+ "macro_name" : FileListMacro,
+ "source_file" : self.FileListMacros[FileListMacro]
+ }
+ )
+ FileMacroList.append(FileMacro)
+
+ # INC_LIST is special
+ FileMacro = ""
+ IncludePathList = []
+ for P in MyAgo.IncludePathList:
+ IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
+ if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
+ self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
+ FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
+ {
+ "macro_name" : "INC",
+ "source_file" : IncludePathList
+ }
+ )
+ FileMacroList.append(FileMacro)
+ # Add support when compiling .nasm source files
+ IncludePathList = []
+ asmsource = [item for item in MyAgo.SourceFileList if item.File.upper().endswith((".NASM",".ASM",".NASMB","S"))]
+ if asmsource:
+ for P in MyAgo.IncludePathList:
+ IncludePath = self._INC_FLAG_['NASM'] + self.PlaceMacro(P, self.Macros)
+ if IncludePath.endswith(os.sep):
+ IncludePath = IncludePath.rstrip(os.sep)
+                # When compiling .nasm files, a literal backslash needs to be appended to each path.
+                # In nmake makefiles, a trailing literal backslash must be escaped with a caret ('^');
+                # otherwise it is replaced with a space (' '). This is not necessary for GNU makefiles.
+ if P == MyAgo.IncludePathList[-1] and self._Platform == WIN32_PLATFORM and self._FileType == NMAKE_FILETYPE:
+ IncludePath = ''.join([IncludePath, '^', os.sep])
+ else:
+ IncludePath = os.path.join(IncludePath, '')
+ IncludePathList.append(IncludePath)
+ FileMacroList.append(self._FILE_MACRO_TEMPLATE.Replace({"macro_name": "NASM_INC", "source_file": IncludePathList}))
+
+ # Generate macros used to represent files containing list of input files
+ for ListFileMacro in self.ListFileMacros:
+ ListFileName = os.path.join(MyAgo.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
+ FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
+ SaveFileOnChange(
+ ListFileName,
+ "\n".join(self.ListFileMacros[ListFileMacro]),
+ False
+ )
+
+ # Generate objlist used to create .obj file
+ for Type in self.ObjTargetDict:
+ NewLine = ' '.join(list(self.ObjTargetDict[Type]))
+ FileMacroList.append("OBJLIST_%s = %s" % (list(self.ObjTargetDict.keys()).index(Type), NewLine))
+
+ BcTargetList = []
+
+ MakefileName = self.getMakefileName()
+ LibraryMakeCommandList = []
+ for D in self.LibraryBuildDirectoryList:
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":os.path.join(D, MakefileName)}
+ LibraryMakeCommandList.append(Command)
+
+ package_rel_dir = MyAgo.SourceDir
+ current_dir = self.Macros["WORKSPACE"]
+ found = False
+ while not found and os.sep in package_rel_dir:
+ index = package_rel_dir.index(os.sep)
+ current_dir = mws.join(current_dir, package_rel_dir[:index])
+ if os.path.exists(current_dir):
+ for fl in os.listdir(current_dir):
+ if fl.endswith('.dec'):
+ found = True
+ break
+ package_rel_dir = package_rel_dir[index + 1:]
+
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
+ "makefile_name" : MakefileName,
+ "platform_name" : self.PlatformInfo.Name,
+ "platform_guid" : self.PlatformInfo.Guid,
+ "platform_version" : self.PlatformInfo.Version,
+ "platform_relative_directory": self.PlatformInfo.SourceDir,
+ "platform_output_directory" : self.PlatformInfo.OutputDir,
+ "ffs_output_directory" : MyAgo.Macros["FFS_OUTPUT_DIR"],
+ "platform_dir" : MyAgo.Macros["PLATFORM_DIR"],
+
+ "module_name" : MyAgo.Name,
+ "module_guid" : MyAgo.Guid,
+ "module_name_guid" : MyAgo.UniqueBaseName,
+ "module_version" : MyAgo.Version,
+ "module_type" : MyAgo.ModuleType,
+ "module_file" : MyAgo.MetaFile.Name,
+ "module_file_base_name" : MyAgo.MetaFile.BaseName,
+ "module_relative_directory" : MyAgo.SourceDir,
+ "module_dir" : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
+ "package_relative_directory": package_rel_dir,
+ "module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.items()],
+
+ "architecture" : MyAgo.Arch,
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+
+ "platform_build_directory" : self.PlatformInfo.BuildDir,
+ "module_build_directory" : MyAgo.BuildDir,
+ "module_output_directory" : MyAgo.OutputDir,
+ "module_debug_directory" : MyAgo.DebugDir,
+
+ "separator" : Separator,
+ "module_tool_definitions" : ToolsDef,
+
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+
+ "module_entry_point" : ModuleEntryPoint,
+ "image_entry_point" : ImageEntryPoint,
+ "arch_entry_point" : ArchEntryPoint,
+ "remaining_build_target" : self.ResultFileList,
+ "common_dependency_file" : self.CommonFileDependency,
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "clean_command" : self.GetRemoveDirectoryCommand(["$(OUTPUT_DIR)"]),
+ "cleanall_command" : self.GetRemoveDirectoryCommand(["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]),
+ "dependent_library_build_directory" : self.LibraryBuildDirectoryList,
+ "library_build_command" : LibraryMakeCommandList,
+ "file_macro" : FileMacroList,
+ "file_build_target" : self.BuildTargetList,
+ "backward_compatible_target": BcTargetList,
+ "INCLUDETAG" : "\n".join([self._INCLUDE_CMD_[self._FileType] + " " + os.path.join("$(MODULE_BUILD_DIR)","dependency"),
+ self._INCLUDE_CMD_[self._FileType] + " " + os.path.join("$(MODULE_BUILD_DIR)","deps_target")
+ ])
+ }
+
+ return MakefileTemplateDict
+
+ def ParserGenerateFfsCmd(self):
+        # Add FFS commands to self.BuildTargetList
+ OutputFile = ''
+ DepsFileList = []
+
+ for Cmd in self.GenFfsList:
+ if Cmd[2]:
+ for CopyCmd in Cmd[2]:
+ Src, Dst = CopyCmd
+ Src = self.ReplaceMacro(Src)
+ Dst = self.ReplaceMacro(Dst)
+ if Dst not in self.ResultFileList:
+ self.ResultFileList.append(Dst)
+ if '%s :' %(Dst) not in self.BuildTargetList:
+ self.BuildTargetList.append("%s : %s" %(Dst,Src))
+ self.BuildTargetList.append('\t' + self._CP_TEMPLATE_[self._Platform] %{'Src': Src, 'Dst': Dst})
+
+ FfsCmdList = Cmd[0]
+ for index, Str in enumerate(FfsCmdList):
+ if '-o' == Str:
+ OutputFile = FfsCmdList[index + 1]
+ if '-i' == Str or "-oi" == Str:
+ if DepsFileList == []:
+ DepsFileList = [FfsCmdList[index + 1]]
+ else:
+ DepsFileList.append(FfsCmdList[index + 1])
+ DepsFileString = ' '.join(DepsFileList).strip()
+ if DepsFileString == '':
+ continue
+ OutputFile = self.ReplaceMacro(OutputFile)
+ self.ResultFileList.append(OutputFile)
+ DepsFileString = self.ReplaceMacro(DepsFileString)
+ self.BuildTargetList.append('%s : %s' % (OutputFile, DepsFileString))
+ CmdString = ' '.join(FfsCmdList).strip()
+ CmdString = self.ReplaceMacro(CmdString)
+ self.BuildTargetList.append('\t%s' % CmdString)
+
+ self.ParseSecCmd(DepsFileList, Cmd[1])
+ for SecOutputFile, SecDepsFile, SecCmd in self.FfsOutputFileList :
+ self.BuildTargetList.append('%s : %s' % (self.ReplaceMacro(SecOutputFile), self.ReplaceMacro(SecDepsFile)))
+ self.BuildTargetList.append('\t%s' % self.ReplaceMacro(SecCmd))
+ self.FfsOutputFileList = []
+
+ def ParseSecCmd(self, OutputFileList, CmdTuple):
+ for OutputFile in OutputFileList:
+ for SecCmdStr in CmdTuple:
+ SecDepsFileList = []
+ SecCmdList = SecCmdStr.split()
+ CmdName = SecCmdList[0]
+ for index, CmdItem in enumerate(SecCmdList):
+ if '-o' == CmdItem and OutputFile == SecCmdList[index + 1]:
+ index = index + 1
+ while index + 1 < len(SecCmdList):
+ if not SecCmdList[index+1].startswith('-'):
+ SecDepsFileList.append(SecCmdList[index + 1])
+ index = index + 1
+ if CmdName == 'Trim':
+ SecDepsFileList.append(os.path.join('$(DEBUG_DIR)', os.path.basename(OutputFile).replace('offset', 'efi')))
+ if OutputFile.endswith('.ui') or OutputFile.endswith('.ver'):
+ SecDepsFileList.append(os.path.join('$(MODULE_DIR)', '$(MODULE_FILE)'))
+ self.FfsOutputFileList.append((OutputFile, ' '.join(SecDepsFileList), SecCmdStr))
+ if len(SecDepsFileList) > 0:
+ self.ParseSecCmd(SecDepsFileList, CmdTuple)
+ break
+ else:
+ continue
+
+    ## Replace known macro values in a command string with their $(MACRO) form
+    def ReplaceMacro(self, String):
+        for Macro in self.MacroList:
+            if self._AutoGenObject.Macros[Macro] and os.path.normcase(self._AutoGenObject.Macros[Macro]) in os.path.normcase(String):
+                replace_dir = String[os.path.normcase(String).index(os.path.normcase(self._AutoGenObject.Macros[Macro])): os.path.normcase(String).index(
+                    os.path.normcase(self._AutoGenObject.Macros[Macro])) + len(self._AutoGenObject.Macros[Macro])]
+                String = String.replace(replace_dir, '$(' + Macro + ')')
+        return String
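+
+    # For example (macro values are illustrative): if Macros['OUTPUT_DIR'] is
+    # 'c:\bld\module\OUTPUT', then
+    #     ReplaceMacro('GenFfs -o c:\bld\module\OUTPUT\x.ffs')
+    # returns 'GenFfs -o $(OUTPUT_DIR)\x.ffs'; the match is case-insensitive
+    # because both sides are compared through os.path.normcase.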
+
+ def CommandExceedLimit(self):
+ FlagDict = {
+ 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : False},
+ 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : False},
+ 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : False},
+ 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : False},
+ 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : False},
+ 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : False},
+ 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : False},
+ }
+
+ RespDict = {}
+ FileTypeList = []
+ IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily]
+
+        # decide the file types based on the source files
+ for File in self._AutoGenObject.SourceFileList:
+ for type in self._AutoGenObject.FileTypes:
+ if File in self._AutoGenObject.FileTypes[type]:
+ if type not in FileTypeList:
+ FileTypeList.append(type)
+
+ # calculate the command-line length
+ if FileTypeList:
+ for type in FileTypeList:
+ BuildTargets = self._AutoGenObject.BuildRules[type].BuildTargets
+ for Target in BuildTargets:
+ CommandList = BuildTargets[Target].Commands
+ for SingleCommand in CommandList:
+ Tool = ''
+ SingleCommandLength = len(SingleCommand)
+ SingleCommandList = SingleCommand.split()
+ if len(SingleCommandList) > 0:
+ for Flag in FlagDict:
+ if '$('+ Flag +')' in SingleCommandList[0]:
+ Tool = Flag
+ break
+ if Tool:
+ if 'PATH' not in self._AutoGenObject.BuildOption[Tool]:
+ EdkLogger.error("build", AUTOGEN_ERROR, "%s_PATH doesn't exist in %s ToolChain and %s Arch." %(Tool, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), ExtraData="[%s]" % str(self._AutoGenObject))
+ SingleCommandLength += len(self._AutoGenObject.BuildOption[Tool]['PATH'])
+ for item in SingleCommandList[1:]:
+ if FlagDict[Tool]['Macro'] in item:
+ if 'FLAGS' not in self._AutoGenObject.BuildOption[Tool]:
+ EdkLogger.error("build", AUTOGEN_ERROR, "%s_FLAGS doesn't exist in %s ToolChain and %s Arch." %(Tool, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), ExtraData="[%s]" % str(self._AutoGenObject))
+ Str = self._AutoGenObject.BuildOption[Tool]['FLAGS']
+ for Option in self._AutoGenObject.BuildOption:
+ for Attr in self._AutoGenObject.BuildOption[Option]:
+ if Str.find(Option + '_' + Attr) != -1:
+ Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
+ while(Str.find('$(') != -1):
+ for macro in self._AutoGenObject.Macros:
+ MacroName = '$('+ macro + ')'
+ if (Str.find(MacroName) != -1):
+ Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
+ break
+ else:
+ break
+ SingleCommandLength += len(Str)
+ elif '$(INC)' in item:
+ SingleCommandLength += self._AutoGenObject.IncludePathLength + len(IncPrefix) * len(self._AutoGenObject.IncludePathList)
+ elif item.find('$(') != -1:
+ Str = item
+ for Option in self._AutoGenObject.BuildOption:
+ for Attr in self._AutoGenObject.BuildOption[Option]:
+ if Str.find(Option + '_' + Attr) != -1:
+ Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
+ while(Str.find('$(') != -1):
+ for macro in self._AutoGenObject.Macros:
+ MacroName = '$('+ macro + ')'
+ if (Str.find(MacroName) != -1):
+ Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
+ break
+ else:
+ break
+ SingleCommandLength += len(Str)
+
+ if SingleCommandLength > GlobalData.gCommandMaxLength:
+ FlagDict[Tool]['Value'] = True
+
+        # generate the response file content by combining the FLAGS and INC
+ for Flag in FlagDict:
+ if FlagDict[Flag]['Value']:
+ Key = Flag + '_RESP'
+ RespMacro = FlagDict[Flag]['Macro'].replace('FLAGS', 'RESP')
+ Value = self._AutoGenObject.BuildOption[Flag]['FLAGS']
+ for inc in self._AutoGenObject.IncludePathList:
+ Value += ' ' + IncPrefix + inc
+ for Option in self._AutoGenObject.BuildOption:
+ for Attr in self._AutoGenObject.BuildOption[Option]:
+ if Value.find(Option + '_' + Attr) != -1:
+ Value = Value.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
+ while (Value.find('$(') != -1):
+ for macro in self._AutoGenObject.Macros:
+ MacroName = '$('+ macro + ')'
+ if (Value.find(MacroName) != -1):
+ Value = Value.replace(MacroName, self._AutoGenObject.Macros[macro])
+ break
+ else:
+ break
+
+ if self._AutoGenObject.ToolChainFamily == 'GCC':
+ RespDict[Key] = Value.replace('\\', '/')
+ else:
+ RespDict[Key] = Value
+ for Target in BuildTargets:
+ for i, SingleCommand in enumerate(BuildTargets[Target].Commands):
+ if FlagDict[Flag]['Macro'] in SingleCommand:
+ BuildTargets[Target].Commands[i] = SingleCommand.replace('$(INC)', '').replace(FlagDict[Flag]['Macro'], RespMacro)
+ return RespDict
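+
+    # An illustrative outcome: if the fully expanded CC command line exceeds
+    # GlobalData.gCommandMaxLength, the returned dict may contain an entry like
+    #     {'CC_RESP': '<expanded CC_FLAGS plus one -I<path> per include>'}
+    # and every '$(CC_FLAGS)' in the affected build commands is rewritten to
+    # '$(CC_RESP)', whose makefile definition points at a response file via '@<file>'.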
+
+ def ProcessBuildTargetList(self, RespFile, ToolsDef):
+ #
+ # Search dependency file list for each source file
+ #
+ ForceIncludedFile = []
+ for File in self._AutoGenObject.AutoGenFileList:
+ if File.Ext == '.h':
+ ForceIncludedFile.append(File)
+ SourceFileList = []
+ OutPutFileList = []
+ for Target in self._AutoGenObject.IntroTargetList:
+ SourceFileList.extend(Target.Inputs)
+ OutPutFileList.extend(Target.Outputs)
+
+ if OutPutFileList:
+ for Item in OutPutFileList:
+ if Item in SourceFileList:
+ SourceFileList.remove(Item)
+
+ FileDependencyDict = {item:ForceIncludedFile for item in SourceFileList}
+
+ for Dependency in FileDependencyDict.values():
+ self.DependencyHeaderFileSet.update(set(Dependency))
+
+ # Get a set of unique package includes from MetaFile
+ parentMetaFileIncludes = set()
+ for aInclude in self._AutoGenObject.PackageIncludePathList:
+ aIncludeName = str(aInclude)
+ parentMetaFileIncludes.add(aIncludeName.lower())
+
+ # Check if header files are listed in metafile
+ # Get a set of unique module header source files from MetaFile
+ headerFilesInMetaFileSet = set()
+ for aFile in self._AutoGenObject.SourceFileList:
+ aFileName = str(aFile)
+ if not aFileName.endswith('.h'):
+ continue
+ headerFilesInMetaFileSet.add(aFileName.lower())
+
+ # Get a set of unique module autogen files
+ localAutoGenFileSet = set()
+ for aFile in self._AutoGenObject.AutoGenFileList:
+ localAutoGenFileSet.add(str(aFile).lower())
+
+ # Get a set of unique module dependency header files
+ # Exclude autogen files and files not in the source directory
+ # and files that are under the package include list
+ headerFileDependencySet = set()
+ localSourceDir = str(self._AutoGenObject.SourceDir).lower()
+ for Dependency in FileDependencyDict.values():
+ for aFile in Dependency:
+ aFileName = str(aFile).lower()
+ # Exclude non-header files
+ if not aFileName.endswith('.h'):
+ continue
+ # Exclude autogen files
+ if aFileName in localAutoGenFileSet:
+ continue
+                # Exclude includes outside the local source directory
+ if localSourceDir not in aFileName:
+ continue
+ # Exclude files covered by package includes
+ pathNeeded = True
+ for aIncludePath in parentMetaFileIncludes:
+ if aIncludePath in aFileName:
+ pathNeeded = False
+ break
+ if not pathNeeded:
+ continue
+ # Keep the file to be checked
+ headerFileDependencySet.add(aFileName)
+
+ # Check if a module dependency header file is missing from the module's MetaFile
+ for aFile in headerFileDependencySet:
+ if aFile in headerFilesInMetaFileSet:
+ continue
+ if GlobalData.gUseHashCache:
+ GlobalData.gModuleBuildTracking[self._AutoGenObject] = 'FAIL_METAFILE'
+ EdkLogger.warn("build","Module MetaFile [Sources] is missing local header!",
+ ExtraData = "Local Header: " + aFile + " not found in " + self._AutoGenObject.MetaFile.Path
+ )
+
+ for File,Dependency in FileDependencyDict.items():
+ if not Dependency:
+ continue
+
+ self._AutoGenObject.AutoGenDepSet |= set(Dependency)
+
+ CmdSumDict = {}
+ CmdTargetDict = {}
+ CmdCppDict = {}
+ DependencyDict = FileDependencyDict.copy()
+
+ # Convert target description object to target string in makefile
+ if self._AutoGenObject.BuildRuleFamily == TAB_COMPILER_MSFT and TAB_C_CODE_FILE in self._AutoGenObject.Targets:
+ for T in self._AutoGenObject.Targets[TAB_C_CODE_FILE]:
+ NewFile = self.PlaceMacro(str(T), self.Macros)
+ if not self.ObjTargetDict.get(T.Target.SubDir):
+ self.ObjTargetDict[T.Target.SubDir] = set()
+ self.ObjTargetDict[T.Target.SubDir].add(NewFile)
+ for Type in self._AutoGenObject.Targets:
+ resp_file_number = 0
+ for T in self._AutoGenObject.Targets[Type]:
+ # Generate related macros if needed
+ if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
+ self.FileListMacros[T.FileListMacro] = []
+ if T.GenListFile and T.ListFileMacro not in self.ListFileMacros:
+ self.ListFileMacros[T.ListFileMacro] = []
+ if T.GenIncListFile and T.IncListFileMacro not in self.ListFileMacros:
+ self.ListFileMacros[T.IncListFileMacro] = []
+
+ Deps = []
+ CCodeDeps = []
+ # Add force-dependencies
+ for Dep in T.Dependencies:
+ Deps.append(self.PlaceMacro(str(Dep), self.Macros))
+ if Dep != '$(MAKE_FILE)':
+ CCodeDeps.append(self.PlaceMacro(str(Dep), self.Macros))
+ # Add inclusion-dependencies
+ if len(T.Inputs) == 1 and T.Inputs[0] in FileDependencyDict:
+ for F in FileDependencyDict[T.Inputs[0]]:
+ Deps.append(self.PlaceMacro(str(F), self.Macros))
+ # Add source-dependencies
+ for F in T.Inputs:
+ NewFile = self.PlaceMacro(str(F), self.Macros)
+ # In order to use file list macro as dependency
+ if T.GenListFile:
+ # gnu tools need forward slash path separator, even on Windows
+ self.ListFileMacros[T.ListFileMacro].append(str(F).replace ('\\', '/'))
+ self.FileListMacros[T.FileListMacro].append(NewFile)
+ elif T.GenFileListMacro:
+ self.FileListMacros[T.FileListMacro].append(NewFile)
+ else:
+ Deps.append(NewFile)
+ for key in self.FileListMacros:
+ self.FileListMacros[key].sort()
+ # Use file list macro as dependency
+ if T.GenFileListMacro:
+ Deps.append("$(%s)" % T.FileListMacro)
+ if Type in [TAB_OBJECT_FILE, TAB_STATIC_LIBRARY]:
+ Deps.append("$(%s)" % T.ListFileMacro)
+
+ # VBox - begin: Add $(QUIET)
+ sAllCmds = None;
+ for sCmd in T.Commands:
+ sCmd = sCmd.strip();
+ if len(sCmd) > 0:
+ if sCmd[0] == '-' and self._FileType == 'nmake':
+ sCmd = '-$(EFI_QUIET)' + sCmd[1:];
+ else:
+ sCmd = '$(EFI_QUIET)' + sCmd;
+ if sAllCmds is None:
+ sAllCmds = sCmd;
+ else:
+ sAllCmds += '\n\t' + sCmd;
+ # VBox - end.
+
+ if self._AutoGenObject.BuildRuleFamily == TAB_COMPILER_MSFT and Type == TAB_C_CODE_FILE:
+ T, CmdTarget, CmdTargetDict, CmdCppDict = self.ParserCCodeFile(T, Type, CmdSumDict, CmdTargetDict,
+ CmdCppDict, DependencyDict, RespFile,
+ ToolsDef, resp_file_number)
+ resp_file_number += 1
+ TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": "\n\t".join(T.Commands),"deps": CCodeDeps}
+ # VBox: Original: TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": sAllCmds,"deps": CCodeDeps}
+ CmdLine = self._BUILD_TARGET_TEMPLATE.Replace(TargetDict).rstrip().replace('\t$(OBJLIST', '$(OBJLIST')
+ if T.Commands:
+ CmdLine = '%s%s' %(CmdLine, TAB_LINE_BREAK)
+ if CCodeDeps or CmdLine:
+ self.BuildTargetList.append(CmdLine)
+ else:
+ TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": "\n\t".join(T.Commands),"deps": Deps}
+ # VBox: Original: TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": sAllCmds,"deps": Deps}
+ self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(TargetDict))
+
+ # Add a Makefile rule for targets generating multiple files.
+ # The main output is a prerequisite for the other output files.
+ for i in T.Outputs[1:]:
+ AnnexeTargetDict = {"target": self.PlaceMacro(i.Path, self.Macros), "cmd": "", "deps": self.PlaceMacro(T.Target.Path, self.Macros)}
+ self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(AnnexeTargetDict))
+
+ def ParserCCodeFile(self, T, Type, CmdSumDict, CmdTargetDict, CmdCppDict, DependencyDict, RespFile, ToolsDef,
+ resp_file_number):
+ SaveFilePath = os.path.join(RespFile, "cc_resp_%s.txt" % resp_file_number)
+ if not CmdSumDict:
+ for item in self._AutoGenObject.Targets[Type]:
+ CmdSumDict[item.Target.SubDir] = item.Target.BaseName
+ for CppPath in item.Inputs:
+ Path = self.PlaceMacro(CppPath.Path, self.Macros)
+ if CmdCppDict.get(item.Target.SubDir):
+ CmdCppDict[item.Target.SubDir].append(Path)
+ else:
+ CmdCppDict[item.Target.SubDir] = ['$(MAKE_FILE)', Path]
+ if CppPath.Path in DependencyDict:
+ for Temp in DependencyDict[CppPath.Path]:
+ try:
+ Path = self.PlaceMacro(Temp.Path, self.Macros)
+ except:
+ continue
+ if Path not in (self.CommonFileDependency + CmdCppDict[item.Target.SubDir]):
+ CmdCppDict[item.Target.SubDir].append(Path)
+ if T.Commands:
+ CommandList = T.Commands[:]
+ for Item in CommandList[:]:
+ SingleCommandList = Item.split()
+ if len(SingleCommandList) > 0 and self.CheckCCCmd(SingleCommandList):
+ for Temp in SingleCommandList:
+ if Temp.startswith('/Fo'):
+ CmdSign = '%s%s' % (Temp.rsplit(TAB_SLASH, 1)[0], TAB_SLASH)
+ break
+ else:
+ continue
+ if CmdSign not in list(CmdTargetDict.keys()):
+ cmd = Item.replace(Temp, CmdSign)
+ if SingleCommandList[-1] in cmd:
+ CmdTargetDict[CmdSign] = [cmd.replace(SingleCommandList[-1], "").rstrip(), SingleCommandList[-1]]
+ else:
+ # CmdTargetDict[CmdSign] = "%s %s" % (CmdTargetDict[CmdSign], SingleCommandList[-1])
+ CmdTargetDict[CmdSign].append(SingleCommandList[-1])
+ Index = CommandList.index(Item)
+ CommandList.pop(Index)
+ if SingleCommandList[-1].endswith("%s%s.c" % (TAB_SLASH, CmdSumDict[CmdSign[3:].rsplit(TAB_SLASH, 1)[0]])):
+ Cpplist = CmdCppDict[T.Target.SubDir]
+ Cpplist.insert(0, '$(OBJLIST_%d): ' % list(self.ObjTargetDict.keys()).index(T.Target.SubDir))
+ source_files = CmdTargetDict[CmdSign][1:]
+ source_files.insert(0, " ")
+ if len(source_files)>2:
+ SaveFileOnChange(SaveFilePath, " ".join(source_files), False)
+ T.Commands[Index] = '%s\n\t%s $(cc_resp_%s)' % (
+ ' \\\n\t'.join(Cpplist), CmdTargetDict[CmdSign][0], resp_file_number)
+ ToolsDef.append("cc_resp_%s = @%s" % (resp_file_number, SaveFilePath))
+
+ elif len(source_files)<=2 and len(" ".join(CmdTargetDict[CmdSign][:2]))>GlobalData.gCommandMaxLength:
+ SaveFileOnChange(SaveFilePath, " ".join(source_files), False)
+ T.Commands[Index] = '%s\n\t%s $(cc_resp_%s)' % (
+ ' \\\n\t'.join(Cpplist), CmdTargetDict[CmdSign][0], resp_file_number)
+ ToolsDef.append("cc_resp_%s = @%s" % (resp_file_number, SaveFilePath))
+
+ else:
+ T.Commands[Index] = '%s\n\t%s' % (' \\\n\t'.join(Cpplist), " ".join(CmdTargetDict[CmdSign]))
+ else:
+ T.Commands.pop(Index)
+ return T, CmdSumDict, CmdTargetDict, CmdCppDict
+
+ def CheckCCCmd(self, CommandList):
+ for cmd in CommandList:
+ if '$(CC)' in cmd:
+ return True
+ return False
+ ## For creating makefile targets for dependent libraries
+ def ProcessDependentLibrary(self):
+ for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
+ if not LibraryAutoGen.IsBinaryModule:
+ self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros))
+
+ ## Return a list containing source file's dependencies
+ #
+ # @param FileList The list of source files
+    #   @param  ForceInculeList     The list of files which will be forcibly included
+ # @param SearchPathList The list of search path
+ #
+ # @retval dict The mapping between source file path and its dependencies
+ #
+ def GetFileDependency(self, FileList, ForceInculeList, SearchPathList):
+ Dependency = {}
+ for F in FileList:
+ Dependency[F] = GetDependencyList(self._AutoGenObject, self.FileCache, F, ForceInculeList, SearchPathList)
+ return Dependency
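+
+    # For example (file names are illustrative): for FileList containing main.c,
+    # where main.c includes util.h, the returned mapping would resemble
+    #     {main.c: [<forced AutoGen.h>, util.h]}
+    # with each dependency resolved against SearchPathList by GetDependencyList.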
+
+
+## CustomMakefile class
+#
+# This class encapsulates the makefile and its generation for a module with a custom
+# makefile. It uses a template to generate the makefile content, which is retrieved
+# from the ModuleAutoGen object.
+#
+class CustomMakefile(BuildFile):
+ ## template used to generate the makefile for module with custom makefile
+ _TEMPLATE_ = TemplateString('''\
+${makefile_header}
+
+#
+# Platform Macro Definition
+#
+PLATFORM_NAME = ${platform_name}
+PLATFORM_GUID = ${platform_guid}
+PLATFORM_VERSION = ${platform_version}
+PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
+PLATFORM_DIR = ${platform_dir}
+PLATFORM_OUTPUT_DIR = ${platform_output_directory}
+
+#
+# Module Macro Definition
+#
+MODULE_NAME = ${module_name}
+MODULE_GUID = ${module_guid}
+MODULE_NAME_GUID = ${module_name_guid}
+MODULE_VERSION = ${module_version}
+MODULE_TYPE = ${module_type}
+MODULE_FILE = ${module_file}
+MODULE_FILE_BASE_NAME = ${module_file_base_name}
+BASE_NAME = $(MODULE_NAME)
+MODULE_RELATIVE_DIR = ${module_relative_directory}
+MODULE_DIR = ${module_dir}
+
+#
+# Build Configuration Macro Definition
+#
+ARCH = ${architecture}
+TOOLCHAIN = ${toolchain_tag}
+TOOLCHAIN_TAG = ${toolchain_tag}
+TARGET = ${build_target}
+
+#
+# Build Directory Macro Definition
+#
+# PLATFORM_BUILD_DIR = ${platform_build_directory}
+BUILD_DIR = ${platform_build_directory}
+BIN_DIR = $(BUILD_DIR)${separator}${architecture}
+LIB_DIR = $(BIN_DIR)
+MODULE_BUILD_DIR = ${module_build_directory}
+OUTPUT_DIR = ${module_output_directory}
+DEBUG_DIR = ${module_debug_directory}
+DEST_DIR_OUTPUT = $(OUTPUT_DIR)
+DEST_DIR_DEBUG = $(DEBUG_DIR)
+
+#
+# Tools definitions specific to this module
+#
+${BEGIN}${module_tool_definitions}
+${END}
+MAKE_FILE = ${makefile_path}
+
+#
+# Shell Command Macro
+#
+${BEGIN}${shell_command_code} = ${shell_command}
+${END}
+
+${custom_makefile_content}
+
+#
+# Target used when called from platform makefile, which will bypass the build of dependent libraries
+#
+
+pbuild: init all
+
+
+#
+# ModuleTarget
+#
+
+mbuild: init all
+
+#
+# Build target used in multi-thread build mode, in which no init target is needed
+#
+
+tbuild: all
+
+#
+# Initialization target: print build information and create necessary directories
+#
+init:
+\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
+${BEGIN}\t-@${create_directory_command}\n${END}\
+
+''')
+
+ ## Constructor of CustomMakefile
+ #
+ # @param ModuleAutoGen Object of ModuleAutoGen class
+ #
+ def __init__(self, ModuleAutoGen):
+ BuildFile.__init__(self, ModuleAutoGen)
+ self.PlatformInfo = self._AutoGenObject.PlatformInfo
+ self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing information used to do replacement in template
+ @property
+ def _TemplateDict(self):
+ Separator = self._SEP_[self._Platform]
+ MyAgo = self._AutoGenObject
+ if self._FileType not in MyAgo.CustomMakefile:
+ EdkLogger.error('build', OPTION_NOT_SUPPORTED, "No custom makefile for %s" % self._FileType,
+ ExtraData="[%s]" % str(MyAgo))
+ MakefilePath = mws.join(
+ MyAgo.WorkspaceDir,
+ MyAgo.CustomMakefile[self._FileType]
+ )
+ try:
+ CustomMakefile = open(MakefilePath, 'r').read()
+ except:
+ EdkLogger.error('build', FILE_OPEN_FAILURE, File=str(MyAgo),
+ ExtraData=MyAgo.CustomMakefile[self._FileType])
+
+ # tools definitions
+ ToolsDef = []
+ for Tool in MyAgo.BuildOption:
+            # Don't generate MAKE_FLAGS in the makefile; it's passed via an environment variable.
+ if Tool == "MAKE":
+ continue
+ for Attr in MyAgo.BuildOption[Tool]:
+ if Attr == "FAMILY":
+ continue
+ elif Attr == "PATH":
+ ToolsDef.append("%s = %s" % (Tool, MyAgo.BuildOption[Tool][Attr]))
+ else:
+ ToolsDef.append("%s_%s = %s" % (Tool, Attr, MyAgo.BuildOption[Tool][Attr]))
+ ToolsDef.append("")
+
+ MakefileName = self.getMakefileName()
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
+ "platform_name" : self.PlatformInfo.Name,
+ "platform_guid" : self.PlatformInfo.Guid,
+ "platform_version" : self.PlatformInfo.Version,
+ "platform_relative_directory": self.PlatformInfo.SourceDir,
+ "platform_output_directory" : self.PlatformInfo.OutputDir,
+ "platform_dir" : MyAgo.Macros["PLATFORM_DIR"],
+
+ "module_name" : MyAgo.Name,
+ "module_guid" : MyAgo.Guid,
+ "module_name_guid" : MyAgo.UniqueBaseName,
+ "module_version" : MyAgo.Version,
+ "module_type" : MyAgo.ModuleType,
+ "module_file" : MyAgo.MetaFile,
+ "module_file_base_name" : MyAgo.MetaFile.BaseName,
+ "module_relative_directory" : MyAgo.SourceDir,
+ "module_dir" : mws.join (MyAgo.WorkspaceDir, MyAgo.SourceDir),
+
+ "architecture" : MyAgo.Arch,
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+
+ "platform_build_directory" : self.PlatformInfo.BuildDir,
+ "module_build_directory" : MyAgo.BuildDir,
+ "module_output_directory" : MyAgo.OutputDir,
+ "module_debug_directory" : MyAgo.DebugDir,
+
+ "separator" : Separator,
+ "module_tool_definitions" : ToolsDef,
+
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "custom_makefile_content" : CustomMakefile
+ }
+
+ return MakefileTemplateDict
+
+## PlatformMakefile class
+#
+# This class encapsulates the makefile and its generation for a platform. It uses a
+# template to generate the makefile content, which is retrieved from the
+# PlatformAutoGen object.
+#
+class PlatformMakefile(BuildFile):
+ ## template used to generate the makefile for platform
+ _TEMPLATE_ = TemplateString('''\
+${makefile_header}
+
+#
+# Platform Macro Definition
+#
+PLATFORM_NAME = ${platform_name}
+PLATFORM_GUID = ${platform_guid}
+PLATFORM_VERSION = ${platform_version}
+PLATFORM_FILE = ${platform_file}
+PLATFORM_DIR = ${platform_dir}
+PLATFORM_OUTPUT_DIR = ${platform_output_directory}
+
+#
+# Build Configuration Macro Definition
+#
+TOOLCHAIN = ${toolchain_tag}
+TOOLCHAIN_TAG = ${toolchain_tag}
+TARGET = ${build_target}
+
+#
+# Build Directory Macro Definition
+#
+BUILD_DIR = ${platform_build_directory}
+FV_DIR = ${platform_build_directory}${separator}FV
+
+#
+# Shell Command Macro
+#
+${BEGIN}${shell_command_code} = ${shell_command}
+${END}
+
+MAKE = ${make_path}
+MAKE_FILE = ${makefile_path}
+
+#
+# Default target
+#
+all: init build_libraries build_modules
+
+#
+# Initialization target: print build information and create necessary directories
+#
+init:
+\t-@echo Building ... $(PLATFORM_FILE) [${build_architecture_list}]
+\t${BEGIN}-@${create_directory_command}
+\t${END}
+#
+# library build target
+#
+libraries: init build_libraries
+
+#
+# module build target
+#
+modules: init build_libraries build_modules
+
+#
+# Build all libraries:
+#
+build_libraries:
+${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${library_makefile_list} pbuild
+${END}\t@cd $(BUILD_DIR)
+
+#
+# Build all modules:
+#
+build_modules:
+${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${module_makefile_list} pbuild
+${END}\t@cd $(BUILD_DIR)
+
+#
+# Clean intermediate files
+#
+clean:
+\t${BEGIN}-@${library_build_command} clean
+\t${END}${BEGIN}-@${module_build_command} clean
+\t${END}@cd $(BUILD_DIR)
+
+#
+# Clean all generated files except the makefile
+#
+cleanall:
+${BEGIN}\t${cleanall_command}
+${END}
+
+#
+# Clean all library files
+#
+cleanlib:
+\t${BEGIN}-@${library_build_command} cleanall
+\t${END}@cd $(BUILD_DIR)\n
+''')
+
+ ## Constructor of PlatformMakefile
+ #
+    #   @param  PlatformAutoGen     Object of PlatformAutoGen class
+ #
+ def __init__(self, PlatformAutoGen):
+ BuildFile.__init__(self, PlatformAutoGen)
+ self.ModuleBuildCommandList = []
+ self.ModuleMakefileList = []
+ self.IntermediateDirectoryList = []
+ self.ModuleBuildDirectoryList = []
+ self.LibraryBuildDirectoryList = []
+ self.LibraryMakeCommandList = []
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing information used to do replacement in template
+ @property
+ def _TemplateDict(self):
+ Separator = self._SEP_[self._Platform]
+
+ MyAgo = self._AutoGenObject
+ if "MAKE" not in MyAgo.ToolDefinition or "PATH" not in MyAgo.ToolDefinition["MAKE"]:
+ EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
+ ExtraData="[%s]" % str(MyAgo))
+
+ self.IntermediateDirectoryList = ["$(BUILD_DIR)"]
+ self.ModuleBuildDirectoryList = self.GetModuleBuildDirectoryList()
+ self.LibraryBuildDirectoryList = self.GetLibraryBuildDirectoryList()
+
+ MakefileName = self.getMakefileName()
+ LibraryMakefileList = []
+ LibraryMakeCommandList = []
+ for D in self.LibraryBuildDirectoryList:
+ D = self.PlaceMacro(D, {"BUILD_DIR":MyAgo.BuildDir})
+ Makefile = os.path.join(D, MakefileName)
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":Makefile}
+ LibraryMakefileList.append(Makefile)
+ LibraryMakeCommandList.append(Command)
+ self.LibraryMakeCommandList = LibraryMakeCommandList
+
+ ModuleMakefileList = []
+ ModuleMakeCommandList = []
+ for D in self.ModuleBuildDirectoryList:
+ D = self.PlaceMacro(D, {"BUILD_DIR":MyAgo.BuildDir})
+ Makefile = os.path.join(D, MakefileName)
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":Makefile}
+ ModuleMakefileList.append(Makefile)
+ ModuleMakeCommandList.append(Command)
+
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName),
+ "make_path" : MyAgo.ToolDefinition["MAKE"]["PATH"],
+ "makefile_name" : MakefileName,
+ "platform_name" : MyAgo.Name,
+ "platform_guid" : MyAgo.Guid,
+ "platform_version" : MyAgo.Version,
+ "platform_file" : MyAgo.MetaFile,
+ "platform_relative_directory": MyAgo.SourceDir,
+ "platform_output_directory" : MyAgo.OutputDir,
+ "platform_build_directory" : MyAgo.BuildDir,
+ "platform_dir" : MyAgo.Macros["PLATFORM_DIR"],
+
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+ "build_architecture_list" : MyAgo.Arch,
+ "architecture" : MyAgo.Arch,
+ "separator" : Separator,
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
+ "library_makefile_list" : LibraryMakefileList,
+ "module_makefile_list" : ModuleMakefileList,
+ "library_build_command" : LibraryMakeCommandList,
+ "module_build_command" : ModuleMakeCommandList,
+ }
+
+ return MakefileTemplateDict
+
+ ## Get the root directory list for intermediate files of all modules build
+ #
+ # @retval list The list of directory
+ #
+ def GetModuleBuildDirectoryList(self):
+ DirList = []
+ for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
+ return DirList
+
+ ## Get the root directory list for intermediate files of all library builds
+ #
+ # @retval list The list of directories
+ #
+ def GetLibraryBuildDirectoryList(self):
+ DirList = []
+ for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
+ if not LibraryAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
+ return DirList
+
+## TopLevelMakefile class
+#
+# This class encapsulates the makefile and its generation for the entry-point
+# makefile. It uses a template to generate the makefile content, which is
+# obtained from the WorkspaceAutoGen object.
+#
+class TopLevelMakefile(BuildFile):
+ ## template used to generate toplevel makefile
+ _TEMPLATE_ = TemplateString('''${BEGIN}\tGenFds -f ${fdf_file} --conf=${conf_directory} -o ${platform_build_directory} -t ${toolchain_tag} -b ${build_target} -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -C ${cap} ${END}${BEGIN} -D ${macro} ${END}''')
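+ # Illustrative expansion of the template above (all names are hypothetical):
+ #   GenFds -f Platform.fdf --conf=Conf -o Build/Platform -t GCC5 -b DEBUG
+ #          -p Platform.dsc -a IA32,X64 -v -r FD1 -i FV1 -D "MACRO=VALUE"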
+
+ ## Constructor of TopLevelMakefile
+ #
+ # @param Workspace Object of WorkspaceAutoGen class
+ #
+ def __init__(self, Workspace):
+ BuildFile.__init__(self, Workspace)
+ self.IntermediateDirectoryList = []
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing the information used for replacements in the template
+ @property
+ def _TemplateDict(self):
+ Separator = self._SEP_[self._Platform]
+
+ # any platform autogen object is ok because we just need common information
+ MyAgo = self._AutoGenObject
+
+ if "MAKE" not in MyAgo.ToolDefinition or "PATH" not in MyAgo.ToolDefinition["MAKE"]:
+ EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
+ ExtraData="[%s]" % str(MyAgo))
+
+ for Arch in MyAgo.ArchList:
+ self.IntermediateDirectoryList.append(Separator.join(["$(BUILD_DIR)", Arch]))
+ self.IntermediateDirectoryList.append("$(FV_DIR)")
+
+ # TRICK: do not generate the GenFds call in the makefile if there is no FDF file
+ MacroList = []
+ if MyAgo.FdfFile is not None and MyAgo.FdfFile != "":
+ FdfFileList = [MyAgo.FdfFile]
+ # macros passed to GenFds
+ MacroDict = {}
+ MacroDict.update(GlobalData.gGlobalDefines)
+ MacroDict.update(GlobalData.gCommandLineDefines)
+ for MacroName in MacroDict:
+ if MacroDict[MacroName] != "":
+ MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
+ else:
+ MacroList.append('"%s"' % MacroName)
+ else:
+ FdfFileList = []
+
+ # Pass extra common options to the external program called in the makefile, currently GenFds.exe
+ ExtraOption = ''
+ LogLevel = EdkLogger.GetLevel()
+ if LogLevel == EdkLogger.VERBOSE:
+ ExtraOption += " -v"
+ elif LogLevel <= EdkLogger.DEBUG_9:
+ ExtraOption += " -d %d" % (LogLevel - 1)
+ elif LogLevel == EdkLogger.QUIET:
+ ExtraOption += " -q"
+
+ if GlobalData.gCaseInsensitive:
+ ExtraOption += " -c"
+ if not GlobalData.gEnableGenfdsMultiThread:
+ ExtraOption += " --no-genfds-multi-thread"
+ if GlobalData.gIgnoreSource:
+ ExtraOption += " --ignore-sources"
+
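+ # Forward command-line PCD overrides to GenFds; values starting with '{'
+ # are byte arrays, so they are quoted and prefixed with 'H' so GenFds can
+ # distinguish them from plain integer values.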
+ for pcd in GlobalData.BuildOptionPcd:
+ if pcd[2]:
+ pcdname = '.'.join(pcd[0:3])
+ else:
+ pcdname = '.'.join(pcd[0:2])
+ if pcd[3].startswith('{'):
+ ExtraOption += " --pcd " + pcdname + '=' + 'H' + '"' + pcd[3] + '"'
+ else:
+ ExtraOption += " --pcd " + pcdname + '=' + pcd[3]
+
+ MakefileName = self.getMakefileName()
+ SubBuildCommandList = []
+ for A in MyAgo.ArchList:
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":os.path.join("$(BUILD_DIR)", A, MakefileName)}
+ SubBuildCommandList.append(Command)
+
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName),
+ "make_path" : MyAgo.ToolDefinition["MAKE"]["PATH"],
+ "platform_name" : MyAgo.Name,
+ "platform_guid" : MyAgo.Guid,
+ "platform_version" : MyAgo.Version,
+ "platform_build_directory" : MyAgo.BuildDir,
+ "conf_directory" : GlobalData.gConfDirectory,
+
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+ 'arch' : list(MyAgo.ArchList),
+ "build_architecture_list" : ','.join(MyAgo.ArchList),
+ "separator" : Separator,
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
+ "sub_build_command" : SubBuildCommandList,
+ "fdf_file" : FdfFileList,
+ "active_platform" : str(MyAgo),
+ "fd" : MyAgo.FdTargetList,
+ "fv" : MyAgo.FvTargetList,
+ "cap" : MyAgo.CapTargetList,
+ "extra_options" : ExtraOption,
+ "macro" : MacroList,
+ }
+
+ return MakefileTemplateDict
+
+ ## Get the root directory list for intermediate files of all module builds
+ #
+ # @retval list The list of directories
+ #
+ def GetModuleBuildDirectoryList(self):
+ DirList = []
+ for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
+ return DirList
+
+ ## Get the root directory list for intermediate files of all library builds
+ #
+ # @retval list The list of directories
+ #
+ def GetLibraryBuildDirectoryList(self):
+ DirList = []
+ for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
+ if not LibraryAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
+ return DirList
+
+## Find dependencies for one source file
+#
+# By recursively searching for "#include" directives in the file, find all the
+# files needed by the given source file. Dependencies are searched only in the
+# given search path list.
+#
+# @param AutoGenObject The AutoGen object whose Arch selects the dependency database
+# @param FileCache Cache of per-file dependency results
+# @param File The source file
+# @param ForceList The list of files which will be forcibly included
+# @param SearchPathList The list of search paths
+#
+# @retval list The list of files the given source file depends on
+#
+def GetDependencyList(AutoGenObject, FileCache, File, ForceList, SearchPathList):
+ EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File)
+ FileStack = [File] + ForceList
+ DependencySet = set()
+
+ if AutoGenObject.Arch not in gDependencyDatabase:
+ gDependencyDatabase[AutoGenObject.Arch] = {}
+ DepDb = gDependencyDatabase[AutoGenObject.Arch]
+
+ while len(FileStack) > 0:
+ F = FileStack.pop()
+
+ FullPathDependList = []
+ if F in FileCache:
+ for CacheFile in FileCache[F]:
+ FullPathDependList.append(CacheFile)
+ if CacheFile not in DependencySet:
+ FileStack.append(CacheFile)
+ DependencySet.update(FullPathDependList)
+ continue
+
+ CurrentFileDependencyList = []
+ if F in DepDb:
+ CurrentFileDependencyList = DepDb[F]
+ else:
+ try:
+ Fd = open(F.Path, 'rb')
+ FileContent = Fd.read()
+ Fd.close()
+ except BaseException as X:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
+ if len(FileContent) == 0:
+ continue
+ try:
+ if FileContent[0] == 0xff or FileContent[0] == 0xfe:
+ FileContent = FileContent.decode('utf-16')
+ else:
+ FileContent = FileContent.decode()
+ except:
+ # The file is not a text file, for example a .mcb file
+ continue
+ IncludedFileList = gIncludePattern.findall(FileContent)
+
+ for Inc in IncludedFileList:
+ Inc = Inc.strip()
+ # if a macro is used to reference the header file, expand it
+ HeaderList = gMacroPattern.findall(Inc)
+ if len(HeaderList) == 1 and len(HeaderList[0]) == 2:
+ HeaderType = HeaderList[0][0]
+ HeaderKey = HeaderList[0][1]
+ if HeaderType in gIncludeMacroConversion:
+ Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey}
+ else:
+ # unknown macro used in #include; force the file to always be
+ # rebuilt by returning an empty dependency list
+ FileCache[File] = []
+ return []
+ Inc = os.path.normpath(Inc)
+ CurrentFileDependencyList.append(Inc)
+ DepDb[F] = CurrentFileDependencyList
+
+ CurrentFilePath = F.Dir
+ PathList = [CurrentFilePath] + SearchPathList
+ for Inc in CurrentFileDependencyList:
+ for SearchPath in PathList:
+ FilePath = os.path.join(SearchPath, Inc)
+ if FilePath in gIsFileMap:
+ if not gIsFileMap[FilePath]:
+ continue
+ # Cache os.path.isfile() results; calling it too many times slows the build down.
+ elif not os.path.isfile(FilePath):
+ gIsFileMap[FilePath] = False
+ continue
+ else:
+ gIsFileMap[FilePath] = True
+ FilePath = PathClass(FilePath)
+ FullPathDependList.append(FilePath)
+ if FilePath not in DependencySet:
+ FileStack.append(FilePath)
+ break
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\
+ "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList)))
+
+ FileCache[F] = FullPathDependList
+ DependencySet.update(FullPathDependList)
+
+ DependencySet.update(ForceList)
+ if File in DependencySet:
+ DependencySet.remove(File)
+ DependencyList = list(DependencySet) # remove duplicate ones
+
+ return DependencyList
+
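+# Illustrative example (file names hypothetical): if A.c contains
+# '#include "B.h"' and B.h contains '#include <C.h>', then
+#   GetDependencyList(ModuleAG, FileCache, PathClass('A.c'), [], [IncDir])
+# returns the PathClass objects for B.h and C.h, memoizing per-file results
+# in FileCache and gDependencyDatabase for later calls.
+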
+# This acts like the main() function for the script, unless it is 'import'ed into another script.
+if __name__ == '__main__':
+ pass
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py
new file mode 100755
index 00000000..8b2cbebc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -0,0 +1,1615 @@
+## @file
+# Routines for generating Pcd Database
+#
+# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from io import BytesIO
+from Common.Misc import *
+from Common.StringUtils import StringToArray
+from struct import pack
+from .ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER
+from .ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB
+from .ValidCheckingInfoObject import GetValidationObject
+from Common.VariableAttributes import VariableAttributes
+import copy
+from struct import unpack
+from Common.DataType import *
+from Common import GlobalData
+from Common import EdkLogger
+import Common.LongFilePathOs as os
+
+DATABASE_VERSION = 7
+
+gPcdDatabaseAutoGenC = TemplateString("""
+//
+// External PCD database debug information
+//
+#if 0
+${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
+ /* SkuIdTable */
+ { ${BEGIN}${SKUID_VALUE}, ${END} },
+${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */
+${END}
+${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */
+${END}
+ /* VPD */
+${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */
+${END}
+ /* ExMapTable */
+ {
+${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} },
+${END}
+ },
+ /* LocalTokenNumberTable */
+ {
+${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE},
+${END}
+ },
+ /* GuidTable */
+ {
+${BEGIN} ${GUID_STRUCTURE},
+${END}
+ },
+${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */
+${END}
+${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}] */
+ {
+ ${VARIABLE_HEAD_VALUE}
+ },
+${END}
+/* SkuHead */
+ {
+ ${BEGIN} offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE}, /* */
+ offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.SkuHead) /* */
+ ${END}
+ },
+ /* StringTable */
+${BEGIN} ${STRING_TABLE_VALUE}, /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
+${END}
+ /* SizeTable */
+ {
+${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, ${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */
+${END}
+ },
+${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */
+${END}
+${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */
+${END}
+${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */
+${END}
+ ${SYSTEM_SKU_ID_VALUE}
+};
+#endif
+""")
+
+## Mapping between PCD driver type and EFI phase
+gPcdPhaseMap = {
+ "PEI_PCD_DRIVER" : "PEI",
+ "DXE_PCD_DRIVER" : "DXE"
+}
+
+gPcdDatabaseAutoGenH = TemplateString("""
+#define PCD_${PHASE}_SERVICE_DRIVER_VERSION ${SERVICE_DRIVER_VERSION}
+
+//
+// External PCD database debug information
+//
+#if 0
+#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE}
+#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE}
+#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE}
+#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE}
+#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER}
+#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE}
+#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER}
+#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE}
+#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY}
+#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY}
+#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY}
+#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY}
+#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY}
+
+typedef struct {
+ UINT64 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE];
+${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}];
+${END}
+${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64};
+${END}
+${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}];
+${END}
+${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32};
+${END}
+${BEGIN} VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}];
+${END}
+ DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE];
+ UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE];
+ GUID GuidTable[${PHASE}_GUID_TABLE_SIZE];
+${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}];
+${END}
+${BEGIN} VARIABLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}];
+${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
+${END}
+ SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE];
+${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}];
+${END}
+${BEGIN} UINT16 ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16};
+${END}
+${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}];
+${END}
+${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8};
+${END}
+${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}];
+${END}
+${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN};
+${END}
+${SYSTEM_SKU_ID}
+} ${PHASE}_PCD_DATABASE_INIT;
+
+typedef struct {
+${PCD_DATABASE_UNINIT_EMPTY}
+${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}];
+${END}
+${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}];
+${END}
+${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}];
+${END}
+${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}];
+${END}
+${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}];
+${END}
+} ${PHASE}_PCD_DATABASE_UNINIT;
+
+typedef struct {
+ //GUID Signature; // PcdDataBaseGuid
+ //UINT32 BuildVersion;
+ //UINT32 Length;
+ //SKU_ID SystemSkuId; // Current SkuId value.
+ //UINT32 LengthForAllSkus; // Length of all SKU PCD DB
+ //UINT32 UninitDataBaseSize;// Total size of PCDs whose default value is 0.
+ //TABLE_OFFSET LocalTokenNumberTableOffset;
+ //TABLE_OFFSET ExMapTableOffset;
+ //TABLE_OFFSET GuidTableOffset;
+ //TABLE_OFFSET StringTableOffset;
+ //TABLE_OFFSET SizeTableOffset;
+ //TABLE_OFFSET SkuIdTableOffset;
+ //TABLE_OFFSET PcdNameTableOffset;
+ //UINT16 LocalTokenCount; // LOCAL_TOKEN_NUMBER for all
+ //UINT16 ExTokenCount; // EX_TOKEN_NUMBER for DynamicEx
+ //UINT16 GuidTableCount; // The Number of Guid in GuidTable
+ //UINT8 Pad[6];
+ ${PHASE}_PCD_DATABASE_INIT Init;
+ ${PHASE}_PCD_DATABASE_UNINIT Uninit;
+} ${PHASE}_PCD_DATABASE;
+
+#define ${PHASE}_NEX_TOKEN_NUMBER (${PHASE}_LOCAL_TOKEN_NUMBER - ${PHASE}_EX_TOKEN_NUMBER)
+#endif
+""")
+
+
+gEmptyPcdDatabaseAutoGenC = TemplateString("""
+//
+// External PCD database debug information
+//
+#if 0
+${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
+ /* SkuIdTable */
+ { 0 },
+ /* ExMapTable */
+ {
+ {0, 0, 0}
+ },
+ /* LocalTokenNumberTable */
+ {
+ 0
+ },
+ /* GuidTable */
+ {
+ {0x00000000, 0x0000, 0x0000, {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}}
+ },
+ /* StringTable */
+ { 0 },
+ /* SkuHead */
+ {
+ 0, 0
+ },
+ /* SizeTable */
+ {
+ 0, 0
+ },
+ ${SYSTEM_SKU_ID_VALUE}
+};
+#endif
+""")
+
+## DbItemList
+#
+# The class holds the PCD database items. ItemSize, if not zero, should match the item's datum type in the C structure.
+# When the structure is changed, remember to check the ItemSize and the related PackStr in PackData().
+# RawDataList holds the raw data that may need some kind of calculation or transformation;
+# DataList corresponds to the data that will be written to the database. If DataList is not present, RawDataList
+# will be written to the database instead.
+#
+class DbItemList:
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ self.ItemSize = ItemSize
+ self.DataList = DataList if DataList else []
+ self.RawDataList = RawDataList if RawDataList else []
+ self.ListSize = 0
+
+ def GetInterOffset(self, Index):
+ Offset = 0
+ if self.ItemSize == 0:
+ #
+ # Variable length, need to calculate one by one
+ #
+ assert(Index < len(self.RawDataList))
+ for ItemIndex in range(Index):
+ Offset += len(self.RawDataList[ItemIndex])
+ else:
+ Offset = self.ItemSize * Index
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+ if len(self.RawDataList) == 0:
+ self.ListSize = 0
+ return self.ListSize
+ if self.ItemSize == 0:
+ self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1])
+ else:
+ self.ListSize = self.ItemSize * len(self.RawDataList)
+ return self.ListSize
+
+ def PackData(self):
+ ## PackGuid
+ #
+ # Pack the GUID value in C structure format into data array
+ #
+ # @param GuidStructureValue: The GUID value in C structure format
+ #
+ # @retval Buffer: a data array contains the Guid
+ #
+ def PackGuid(GuidStructureValue):
+ GuidString = GuidStructureStringToGuidString(GuidStructureValue)
+ return PackGUID(GuidString.split('-'))
+
+ PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
+
+ Buffer = bytearray()
+ for Datas in self.RawDataList:
+ if type(Datas) in (list, tuple):
+ for Data in Datas:
+ if PackStr:
+ Buffer += pack(PackStr, GetIntegerValue(Data))
+ else:
+ Buffer += PackGuid(Data)
+ else:
+ if PackStr:
+ Buffer += pack(PackStr, GetIntegerValue(Datas))
+ else:
+ Buffer += PackGuid(Datas)
+
+ return Buffer
+
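+# Illustrative use of DbItemList (assuming PACK_CODE_BY_SIZE maps 4 to '=L'):
+# a 4-byte item list [1, 2] occupies 8 bytes, the second item starts at
+# offset 4, and PackData() yields pack('=L', 1) + pack('=L', 2).
+#
+#   Sample = DbItemList(4, RawDataList=[1, 2])
+#   assert Sample.GetListSize() == 8
+#   assert Sample.GetInterOffset(1) == 4
+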
+## DbExMapTblItemList
+#
+# The class holds the ExMap table
+#
+class DbExMapTblItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def PackData(self):
+ Buffer = bytearray()
+ PackStr = "=LHH"
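+ # Each entry packs as (UINT32 ExTokenNumber, UINT16 LocalTokenNumber,
+ # UINT16 GuidIndex), matching the DYNAMICEX_MAPPING entries emitted by
+ # the autogen templates above.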
+ for Datas in self.RawDataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Datas[0]),
+ GetIntegerValue(Datas[1]),
+ GetIntegerValue(Datas[2]))
+ return Buffer
+
+## DbComItemList
+#
+# DbComItemList is a special kind of DbItemList for cases where the size of the list cannot be computed as
+# ItemSize multiplied by the item count.
+#
+class DbComItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def GetInterOffset(self, Index):
+ Offset = 0
+ if self.ItemSize == 0:
+ #
+ # Variable length would need to be calculated one by one. The only
+ # variable-length table is the string table, which is not a composite
+ # item, so this point should never be reached.
+ #
+ assert(False)
+ else:
+ assert(Index < len(self.RawDataList))
+ for ItemIndex in range(Index):
+ Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+ if self.ItemSize == 0:
+ assert(False)
+ else:
+ if len(self.RawDataList) == 0:
+ self.ListSize = 0
+ else:
+ self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1]) * self.ItemSize
+
+ return self.ListSize
+
+ def PackData(self):
+ PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
+
+ Buffer = bytearray()
+ for DataList in self.RawDataList:
+ for Data in DataList:
+ if type(Data) in (list, tuple):
+ for SingleData in Data:
+ Buffer += pack(PackStr, GetIntegerValue(SingleData))
+ else:
+ Buffer += pack(PackStr, GetIntegerValue(Data))
+
+ return Buffer
+
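+# Illustrative example: in DbComItemList(2, RawDataList=[[1, 2, 3], [4]]) each
+# inner list is one composite item, so GetInterOffset(1) is 3 * 2 = 6 and
+# GetListSize() is 6 + 1 * 2 = 8.
+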
+## DbVariableTableItemList
+#
+# The class holds the Variable header value table
+#
+class DbVariableTableItemList (DbComItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbComItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def PackData(self):
+ PackStr = "=LLHHLHH"
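+ # Packs (StringIndex UINT32, DefaultValueOffset UINT32, GuidTableIndex
+ # UINT16, VariableOffset UINT16, Attributes UINT32, Property UINT16) plus
+ # a reserved UINT16 of 0, matching the VARIABLE_HEAD fields declared in
+ # the autogen header template.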
+ Buffer = bytearray()
+ for DataList in self.RawDataList:
+ for Data in DataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Data[0]),
+ GetIntegerValue(Data[1]),
+ GetIntegerValue(Data[2]),
+ GetIntegerValue(Data[3]),
+ GetIntegerValue(Data[4]),
+ GetIntegerValue(Data[5]),
+ GetIntegerValue(0))
+ return Buffer
+
+class DbStringHeadTableItemList(DbItemList):
+ def __init__(self,ItemSize,DataList=None,RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def GetInterOffset(self, Index):
+ Offset = 0
+ if self.ItemSize == 0:
+ #
+ # Variable length, need to calculate one by one
+ #
+ assert(Index < len(self.RawDataList))
+ for ItemIndex in range(Index):
+ Offset += len(self.RawDataList[ItemIndex])
+ else:
+ for innerIndex in range(Index):
+ if type(self.RawDataList[innerIndex]) in (list, tuple):
+ Offset += len(self.RawDataList[innerIndex]) * self.ItemSize
+ else:
+ Offset += self.ItemSize
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+ if len(self.RawDataList) == 0:
+ self.ListSize = 0
+ return self.ListSize
+ if self.ItemSize == 0:
+ self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1])
+ else:
+ for Datas in self.RawDataList:
+ if type(Datas) in (list, tuple):
+ self.ListSize += len(Datas) * self.ItemSize
+ else:
+ self.ListSize += self.ItemSize
+ return self.ListSize
+
+## DbSkuHeadTableItemList
+#
+# The class holds the Sku header value table
+#
+class DbSkuHeadTableItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def PackData(self):
+ PackStr = "=LL"
+ Buffer = bytearray()
+ for Data in self.RawDataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Data[0]),
+ GetIntegerValue(Data[1]))
+ return Buffer
+
+## DbSizeTableItemList
+#
+# The class holds the size table
+#
+class DbSizeTableItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
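+ # Each size-table entry holds the maximum length followed by one current
+ # length per SKU, so an entry with N current lengths occupies (1 + N) items.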
+ def GetListSize(self):
+ length = 0
+ for Data in self.RawDataList:
+ length += (1 + len(Data[1]))
+ return length * self.ItemSize
+
+ def PackData(self):
+ PackStr = "=H"
+ Buffer = bytearray()
+ for Data in self.RawDataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Data[0]))
+ for subData in Data[1]:
+ Buffer += pack(PackStr,
+ GetIntegerValue(subData))
+ return Buffer
+
+## DbStringItemList
+#
+# The class holds the string table
+#
+class DbStringItemList (DbComItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None, LenList=None):
+ if DataList is None:
+ DataList = []
+ if RawDataList is None:
+ RawDataList = []
+ if LenList is None:
+ LenList = []
+
+ assert(len(RawDataList) == len(LenList))
+ DataList = []
+ # adjust DataList according to the LenList
+ for Index in range(len(RawDataList)):
+ Len = LenList[Index]
+ RawDatas = RawDataList[Index]
+ assert(Len >= len(RawDatas))
+ ActualDatas = []
+ for i in range(len(RawDatas)):
+ ActualDatas.append(RawDatas[i])
+ for i in range(len(RawDatas), Len):
+ ActualDatas.append(0)
+ DataList.append(ActualDatas)
+ self.LenList = LenList
+ DbComItemList.__init__(self, ItemSize, DataList, RawDataList)
+ def GetInterOffset(self, Index):
+ Offset = 0
+
+ assert(Index < len(self.LenList))
+ for ItemIndex in range(Index):
+ Offset += self.LenList[ItemIndex]
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+
+ if len(self.LenList) == 0:
+ self.ListSize = 0
+ else:
+ self.ListSize = self.GetInterOffset(len(self.LenList) - 1) + self.LenList[len(self.LenList)-1]
+
+ return self.ListSize
+
+ def PackData(self):
+ self.RawDataList = self.DataList
+ return DbComItemList.PackData(self)
+
+
+
+## Find the index at which the items of two lists match the two keys respectively
+#
+# @param Key1 The key used to search List1
+# @param List1 The list in which Key1 will be searched
+# @param Key2 The key used to search List2
+# @param List2 The list in which Key2 will be searched
+#
+# @retval Index The position where List1[Index] == Key1 and List2[Index] == Key2
+# @retval -1 If no matched position exists
+#
+def GetMatchedIndex(Key1, List1, Key2, List2):
+ StartPos = 0
+ while StartPos < len(List1):
+ Index = List1.index(Key1, StartPos)
+ if List2[Index] == Key2:
+ return Index
+ else:
+ StartPos = Index + 1
+
+ return -1
+
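+# For example, GetMatchedIndex('a', ['a', 'b', 'a'], 2, [1, 2, 2]) returns 2:
+# index 0 matches Key1 only, while index 2 matches both keys.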
+
+## convert StringArray like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
+# to List like [0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00]
+#
+# @param StringArray A string array like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
+#
+# @retval A list object of integer items
+#
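+# Note: the conversion relies on eval(), so the input must be a well-formed
+# C-style byte array initializer.
+#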
+def StringArrayToList(StringArray):
+ StringArray = StringArray[1:-1]
+ StringArray = '[' + StringArray + ']'
+ return eval(StringArray)
+
+
+## Convert TokenType String like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII" to TokenType value
+#
+# @param TokenType A TokenType string like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII"
+#
+# @retval An integer representation of the TokenType
+#
+def GetTokenTypeValue(TokenType):
+ TokenTypeDict = {
+ "PCD_TYPE_SHIFT": 28,
+ "PCD_TYPE_DATA": (0x0 << 28),
+ "PCD_TYPE_HII": (0x8 << 28),
+ "PCD_TYPE_VPD": (0x4 << 28),
+# "PCD_TYPE_SKU_ENABLED":(0x2 << 28),
+ "PCD_TYPE_STRING": (0x1 << 28),
+
+ "PCD_DATUM_TYPE_SHIFT": 24,
+ "PCD_DATUM_TYPE_POINTER": (0x0 << 24),
+ "PCD_DATUM_TYPE_UINT8": (0x1 << 24),
+ "PCD_DATUM_TYPE_UINT16": (0x2 << 24),
+ "PCD_DATUM_TYPE_UINT32": (0x4 << 24),
+ "PCD_DATUM_TYPE_UINT64": (0x8 << 24),
+
+ "PCD_DATUM_TYPE_SHIFT2": 20,
+ "PCD_DATUM_TYPE_UINT8_BOOLEAN": (0x1 << 20 | 0x1 << 24),
+ }
+ return eval(TokenType, TokenTypeDict)
+
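+# For example, GetTokenTypeValue("PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII")
+# evaluates to (0x4 << 24) | (0x8 << 28) = 0x84000000.
+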
+## construct the external Pcd database using data from Dict
+#
+# @param Dict A dictionary containing PCD-related tables
+#
+# @retval Buffer A byte stream of the Pcd database
+#
+def BuildExDataBase(Dict):
+ # init Db items
+ InitValueUint64 = Dict['INIT_DB_VALUE_UINT64']
+ DbInitValueUint64 = DbComItemList(8, RawDataList = InitValueUint64)
+ VardefValueUint64 = Dict['VARDEF_DB_VALUE_UINT64']
+ DbVardefValueUint64 = DbItemList(8, RawDataList = VardefValueUint64)
+ InitValueUint32 = Dict['INIT_DB_VALUE_UINT32']
+ DbInitValueUint32 = DbComItemList(4, RawDataList = InitValueUint32)
+ VardefValueUint32 = Dict['VARDEF_DB_VALUE_UINT32']
+ DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
+ VpdHeadValue = Dict['VPD_DB_VALUE']
+ DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
+ ExMapTable = list(zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX']))
+ DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
+ LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
+ DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
+ GuidTable = Dict['GUID_STRUCTURE']
+ DbGuidTable = DbItemList(16, RawDataList = GuidTable)
+ StringHeadValue = Dict['STRING_DB_VALUE']
+ # DbItemList to DbStringHeadTableItemList
+ DbStringHeadValue = DbStringHeadTableItemList(4, RawDataList = StringHeadValue)
+ VariableTable = Dict['VARIABLE_DB_VALUE']
+ DbVariableTable = DbVariableTableItemList(20, RawDataList = VariableTable)
+ NumberOfSkuEnabledPcd = GetIntegerValue(Dict['SKU_HEAD_SIZE'])
+
+ Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
+
+ StringTableValue = Dict['STRING_TABLE_DB_VALUE']
+ # When calculating the offset, use StringTableLen instead of StringTableValue, as a string's maximum length may differ from its actual length
+ StringTableLen = Dict['STRING_TABLE_LENGTH']
+ DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
+
+
+ PcdTokenTable = Dict['PCD_TOKENSPACE']
+ PcdTokenLen = Dict['PCD_TOKENSPACE_LENGTH']
+ PcdTokenTableValue = [StringArrayToList(x) for x in Dict['PCD_TOKENSPACE']]
+ DbPcdTokenTable = DbStringItemList(0, RawDataList = PcdTokenTableValue, LenList = PcdTokenLen)
+
+ PcdCNameTable = Dict['PCD_CNAME']
+ PcdCNameLen = Dict['PCD_CNAME_LENGTH']
+ PcdCNameTableValue = [StringArrayToList(x) for x in Dict['PCD_CNAME']]
+ DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
+
+ PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
+ DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
+
+ SizeTableValue = list(zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH']))
+ DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
+ InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
+ DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
+ VardefValueUint16 = Dict['VARDEF_DB_VALUE_UINT16']
+ DbVardefValueUint16 = DbItemList(2, RawDataList = VardefValueUint16)
+ InitValueUint8 = Dict['INIT_DB_VALUE_UINT8']
+ DbInitValueUint8 = DbComItemList(1, RawDataList = InitValueUint8)
+ VardefValueUint8 = Dict['VARDEF_DB_VALUE_UINT8']
+ DbVardefValueUint8 = DbItemList(1, RawDataList = VardefValueUint8)
+ InitValueBoolean = Dict['INIT_DB_VALUE_BOOLEAN']
+ DbInitValueBoolean = DbComItemList(1, RawDataList = InitValueBoolean)
+ VardefValueBoolean = Dict['VARDEF_DB_VALUE_BOOLEAN']
+ DbVardefValueBoolean = DbItemList(1, RawDataList = VardefValueBoolean)
+ SkuidValue = Dict['SKUID_VALUE']
+ DbSkuidValue = DbItemList(8, RawDataList = SkuidValue)
+
+ # Uninit Db items
+ UnInitValueUint64 = Dict['UNINIT_GUID_DECL_UINT64']
+ DbUnInitValueUint64 = DbItemList(8, RawDataList = UnInitValueUint64)
+ UnInitValueUint32 = Dict['UNINIT_GUID_DECL_UINT32']
+ DbUnInitValueUint32 = DbItemList(4, RawDataList = UnInitValueUint32)
+ UnInitValueUint16 = Dict['UNINIT_GUID_DECL_UINT16']
+ DbUnInitValueUint16 = DbItemList(2, RawDataList = UnInitValueUint16)
+ UnInitValueUint8 = Dict['UNINIT_GUID_DECL_UINT8']
+ DbUnInitValueUint8 = DbItemList(1, RawDataList = UnInitValueUint8)
+ UnInitValueBoolean = Dict['UNINIT_GUID_DECL_BOOLEAN']
+ DbUnInitValueBoolean = DbItemList(1, RawDataList = UnInitValueBoolean)
+ PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
+
+ DbNameTotle = ["SkuidValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
+ "LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable", "VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
+ "SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
+ "VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
+
+ DbTotal = [SkuidValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
+ LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable, VariableTable, StringTableLen, PcdTokenTable, PcdCNameTable,
+ SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
+ VardefValueBoolean, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
+ DbItemTotal = [DbSkuidValue, DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
+ DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable, DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
+ DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
+ DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
+
+ # VardefValueBoolean is the last table in the init table items
+ InitTableNum = DbNameTotle.index("VardefValueBoolean") + 1
+ # The FixedHeader length of the PCD_DATABASE_INIT, from Signature to Pad
+ FixedHeaderLen = 80
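+ # 80 = Signature GUID (16) + BuildVersion (4) + Length (4) + SystemSkuId (8)
+ #    + LengthForAllSkus (4) + UninitDataBaseSize (4) + 7 TABLE_OFFSETs (28)
+ #    + 3 UINT16 counts (6) + Pad[6] (6), matching the commented-out header
+ #    fields in the ${PHASE}_PCD_DATABASE typedef above.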
+
+ # Get offset of SkuId table in the database
+ SkuIdTableOffset = FixedHeaderLen
+ for DbIndex in range(len(DbTotal)):
+ if DbTotal[DbIndex] is SkuidValue:
+ break
+ SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
+
+
+ # Get offset of SkuValue table in the database
+
+ # Fix up the LocalTokenNumberTable, SkuHeader table
+ for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
+ DbIndex = 0
+ DbOffset = FixedHeaderLen
+ for DbIndex in range(len(DbTotal)):
+ if DbTotal[DbIndex] is Table:
+ DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
+ break
+ DbOffset += DbItemTotal[DbIndex].GetListSize()
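+ # When crossing the boundary after the last init table, account for the
+ # 8-byte alignment padding inserted before the uninit portion of the DB.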
+ if DbIndex + 1 == InitTableNum:
+ if DbOffset % 8:
+ DbOffset += (8 - DbOffset % 8)
+ else:
+ assert(False)
+
+ TokenTypeValue = Dict['TOKEN_TYPE'][LocalTokenNumberTableIndex]
+ TokenTypeValue = GetTokenTypeValue(TokenTypeValue)
+ LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
+ # if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
+
+ # resolve variable table offset
+ for VariableEntries in VariableTable:
+ skuindex = 0
+ for VariableEntryPerSku in VariableEntries:
+ (VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
+ DbIndex = 0
+ DbOffset = FixedHeaderLen
+ for DbIndex in range(len(DbTotal)):
+ if DbTotal[DbIndex] is VariableRefTable:
+ DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
+ break
+ DbOffset += DbItemTotal[DbIndex].GetListSize()
+ if DbIndex + 1 == InitTableNum:
+ if DbOffset % 8:
+ DbOffset += (8 - DbOffset % 8)
+ else:
+ assert(False)
+ if isinstance(VariableRefTable[0], list):
+ DbOffset += skuindex * 4
+ skuindex += 1
+ if DbIndex >= InitTableNum:
+ assert(False)
+ VarAttr, VarProp = VariableAttributes.GetVarAttributes(VariableAttribute)
+ VariableEntryPerSku[:] = (VariableHeadStringIndex, DbOffset, VariableHeadGuidIndex, SKUVariableOffset, VarAttr, VarProp)
+
+ # Calculate the various table offsets now
+ DbTotalLength = FixedHeaderLen
+ for DbIndex in range(len(DbItemTotal)):
+ if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
+ LocalTokenNumberTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbExMapTable:
+ ExMapTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbGuidTable:
+ GuidTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbStringTableLen:
+ StringTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbSizeTableValue:
+ SizeTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbSkuidValue:
+ SkuIdTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbPcdNameOffsetTable:
+ DbPcdNameOffset = DbTotalLength
+
+ DbTotalLength += DbItemTotal[DbIndex].GetListSize()
+ if not Dict['PCD_INFO_FLAG']:
+ DbPcdNameOffset = 0
+ LocalTokenCount = GetIntegerValue(Dict['LOCAL_TOKEN_NUMBER'])
+ ExTokenCount = GetIntegerValue(Dict['EX_TOKEN_NUMBER'])
+ GuidTableCount = GetIntegerValue(Dict['GUID_TABLE_SIZE'])
+ SystemSkuId = GetIntegerValue(Dict['SYSTEM_SKU_ID_VALUE'])
+ Pad = 0xDA
+
+ UninitDataBaseSize = 0
+ for Item in (DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean):
+ UninitDataBaseSize += Item.GetListSize()
+
+ if (DbTotalLength - UninitDataBaseSize) % 8:
+ DbTotalLength += (8 - (DbTotalLength - UninitDataBaseSize) % 8)
+ # Construct the database buffer
+ Guid = "{0x3c7d193c, 0x682c, 0x4c14, 0xa6, 0x8f, 0x55, 0x2d, 0xea, 0x4f, 0x43, 0x7e}"
+ Guid = StringArrayToList(Guid)
+ Buffer = PackByteFormatGUID(Guid)
+
+ b = pack("=L", DATABASE_VERSION)
+ Buffer += b
+ b = pack('=L', DbTotalLength - UninitDataBaseSize)
+ Buffer += b
+ b = pack('=Q', SystemSkuId)
+ Buffer += b
+ b = pack('=L', 0)
+ Buffer += b
+ b = pack('=L', UninitDataBaseSize)
+ Buffer += b
+ b = pack('=L', LocalTokenNumberTableOffset)
+ Buffer += b
+ b = pack('=L', ExMapTableOffset)
+ Buffer += b
+ b = pack('=L', GuidTableOffset)
+ Buffer += b
+ b = pack('=L', StringTableOffset)
+ Buffer += b
+ b = pack('=L', SizeTableOffset)
+ Buffer += b
+ b = pack('=L', SkuIdTableOffset)
+ Buffer += b
+ b = pack('=L', DbPcdNameOffset)
+ Buffer += b
+ b = pack('=H', LocalTokenCount)
+ Buffer += b
+ b = pack('=H', ExTokenCount)
+ Buffer += b
+ b = pack('=H', GuidTableCount)
+ Buffer += b
+ b = pack('=B', Pad)
+ Buffer += b * 6
+
+ Index = 0
+ for Item in DbItemTotal:
+ Index +=1
+ packdata = Item.PackData()
+ for i in range(len(packdata)):
+ Buffer += packdata[i:i + 1]
+ if Index == InitTableNum:
+ if len(Buffer) % 8:
+ for num in range(8 - len(Buffer) % 8):
+ b = pack('=B', Pad)
+ Buffer += b
+ break
+ return Buffer
+
+## Create code for PCD database
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH):
+ if Info.PcdIsDriver == "":
+ return
+ if Info.PcdIsDriver not in gPcdPhaseMap:
+ EdkLogger.error("build", AUTOGEN_ERROR, "Not supported PcdIsDriver type:%s" % Info.PcdIsDriver,
+ ExtraData="[%s]" % str(Info))
+
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer = NewCreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, 'PEI')
+ AutoGenH.Append(AdditionalAutoGenH.String)
+
+ Phase = gPcdPhaseMap[Info.PcdIsDriver]
+ if Phase == 'PEI':
+ AutoGenC.Append(AdditionalAutoGenC.String)
+
+ if Phase == 'DXE':
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer = NewCreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, Phase)
+ AutoGenH.Append(AdditionalAutoGenH.String)
+ AutoGenC.Append(AdditionalAutoGenC.String)
+
+ if Info.IsBinaryModule:
+ DbFileName = os.path.join(Info.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, Phase + "PcdDataBase.raw")
+ else:
+ DbFileName = os.path.join(Info.OutputDir, Phase + "PcdDataBase.raw")
+ DbFile = BytesIO()
+ DbFile.write(PcdDbBuffer)
+ Changed = SaveFileOnChange(DbFileName, DbFile.getvalue(), True)
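+
+## Merge the per-SKU PCD database buffers into one database image
+#
+# @param PcdDBData A dict keyed by (SkuName, SkuId) whose values are
+# (raw buffer, unpacked byte tuple) pairs
+#
+# @retval buffer The default-SKU database followed by an 8-byte-aligned
+# delta block for every non-default SKU
+#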
+def CreatePcdDataBase(PcdDBData):
+ delta = {}
+ for skuname, skuid in PcdDBData:
+ if len(PcdDBData[(skuname, skuid)][1]) != len(PcdDBData[(TAB_DEFAULT, "0")][1]):
+ EdkLogger.error("build", AUTOGEN_ERROR, "The size of each SKU in one PCD is not the same")
+ for skuname, skuid in PcdDBData:
+ if skuname == TAB_DEFAULT:
+ continue
+ delta[(skuname, skuid)] = [(index, data, hex(data)) for index, data in enumerate(PcdDBData[(skuname, skuid)][1]) if PcdDBData[(skuname, skuid)][1][index] != PcdDBData[(TAB_DEFAULT, "0")][1][index]]
+ databasebuff = PcdDBData[(TAB_DEFAULT, "0")][0]
+
+ for skuname, skuid in delta:
+ # 8 byte align
+ if len(databasebuff) % 8 > 0:
+ for i in range(8 - (len(databasebuff) % 8)):
+ databasebuff += pack("=B", 0)
+ databasebuff += pack('=Q', int(skuid))
+ databasebuff += pack('=Q', 0)
+ databasebuff += pack('=L', 8+8+4+4*len(delta[(skuname, skuid)]))
+ for item in delta[(skuname, skuid)]:
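+ # Each delta record packs the byte index as '=L' and then overwrites its
+ # most significant byte with the new data byte, i.e. a 24-bit offset
+ # followed by an 8-bit value.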
+ databasebuff += pack("=L", item[0])
+ databasebuff = databasebuff[:-1] + pack("=B", item[1])
+ totallen = len(databasebuff)
+ totallenbuff = pack("=L", totallen)
+ newbuffer = databasebuff[:32]
+ for i in range(4):
+ newbuffer += totallenbuff[i:i+1]
+ for i in range(36, totallen):
+ newbuffer += databasebuff[i:i+1]
+
+ return newbuffer
+
+def CreateVarCheckBin(VarCheckTab):
+ return VarCheckTab[(TAB_DEFAULT, "0")]
+
+def CreateAutoGen(PcdDriverAutoGenData):
+ autogenC = TemplateString()
+ for skuname, skuid in PcdDriverAutoGenData:
+ autogenC.Append("//SKUID: %s" % skuname)
+ autogenC.Append(PcdDriverAutoGenData[(skuname, skuid)][1].String)
+ return (PcdDriverAutoGenData[(skuname, skuid)][0], autogenC)
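+
+## Create the PCD database and autogen code for one phase, covering all SKUs
+#
+# @param Platform The PlatformAutoGen object
+# @param Phase 'PEI' or 'DXE'
+#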
+def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
+ def prune_sku(pcd, skuname):
+ new_pcd = copy.deepcopy(pcd)
+ new_pcd.SkuInfoList = {skuname:pcd.SkuInfoList[skuname]}
+ new_pcd.isinit = 'INIT'
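+ # A numeric PCD stays UNINIT only when every SKU has a zero default
+ # value and no HII variable mapping; otherwise it is INIT.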
+ if new_pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ for skuobj in pcd.SkuInfoList.values():
+ if skuobj.DefaultValue:
+ defaultvalue = int(skuobj.DefaultValue, 16) if skuobj.DefaultValue.upper().startswith("0X") else int(skuobj.DefaultValue, 10)
+ if defaultvalue != 0:
+ new_pcd.isinit = "INIT"
+ break
+ elif skuobj.VariableName:
+ new_pcd.isinit = "INIT"
+ break
+ else:
+ new_pcd.isinit = "UNINIT"
+ return new_pcd
+ DynamicPcds = Platform.DynamicPcdList
+ DynamicPcdSet_Sku = {(SkuName, skuobj.SkuId):[] for pcd in DynamicPcds for (SkuName, skuobj) in pcd.SkuInfoList.items() }
+ for skuname, skuid in DynamicPcdSet_Sku:
+ DynamicPcdSet_Sku[(skuname, skuid)] = [prune_sku(pcd, skuname) for pcd in DynamicPcds]
+ PcdDBData = {}
+ PcdDriverAutoGenData = {}
+ VarCheckTableData = {}
+ if DynamicPcdSet_Sku:
+ for skuname, skuid in DynamicPcdSet_Sku:
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
+ final_data = ()
+ for item in range(len(PcdDbBuffer)):
+ final_data += unpack("B", PcdDbBuffer[item:item+1])
+ PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
+ PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
+ VarCheckTableData[(skuname, skuid)] = VarCheckTab
+ if Platform.Platform.VarCheckFlag:
+ dest = os.path.join(Platform.BuildDir, TAB_FV_DIRECTORY)
+ VarCheckTable = CreateVarCheckBin(VarCheckTableData)
+ VarCheckTable.dump(dest, Phase)
+ AdditionalAutoGenH, AdditionalAutoGenC = CreateAutoGen(PcdDriverAutoGenData)
+ else:
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
+ final_data = ()
+ for item in range(len(PcdDbBuffer)):
+ final_data += unpack("B", PcdDbBuffer[item:item + 1])
+ PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
+
+ return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
+
+## Create PCD database in DXE or PEI phase
+#
+# @param Platform The platform object
+# @param DynamicPcdList The list of dynamic PCDs for this phase
+# @param Phase 'PEI' or 'DXE'
+#
+# @retval tuple (AutoGenH, AutoGenC, PcdDbBuffer, VarCheckTab): TemplateString
+# objects for the header and C code, the database buffer, and the
+# variable check table
+#
+def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
+ AutoGenC = TemplateString()
+ AutoGenH = TemplateString()
+
+ Dict = {
+ 'PHASE' : Phase,
+ 'SERVICE_DRIVER_VERSION' : DATABASE_VERSION,
+ 'GUID_TABLE_SIZE' : '1U',
+ 'STRING_TABLE_SIZE' : '1U',
+ 'SKUID_TABLE_SIZE' : '1U',
+ 'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '0U',
+ 'LOCAL_TOKEN_NUMBER' : '0U',
+ 'EXMAPPING_TABLE_SIZE' : '1U',
+ 'EX_TOKEN_NUMBER' : '0U',
+ 'SIZE_TABLE_SIZE' : '2U',
+ 'SKU_HEAD_SIZE' : '1U',
+ 'GUID_TABLE_EMPTY' : 'TRUE',
+ 'STRING_TABLE_EMPTY' : 'TRUE',
+ 'SKUID_TABLE_EMPTY' : 'TRUE',
+ 'DATABASE_EMPTY' : 'TRUE',
+ 'EXMAP_TABLE_EMPTY' : 'TRUE',
+ 'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is empty */',
+ 'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
+ 'SYSTEM_SKU_ID_VALUE' : '0U'
+ }
+
+ SkuObj = Platform.Platform.SkuIdMgr
+ Dict['SYSTEM_SKU_ID_VALUE'] = 0 if SkuObj.SkuUsageType == SkuObj.SINGLE else Platform.Platform.SkuIds[SkuObj.SystemSkuId][0]
+
+ Dict['PCD_INFO_FLAG'] = Platform.Platform.PcdInfoFlag
+
+ for DatumType in TAB_PCD_NUMERIC_TYPES_VOID:
+ Dict['VARDEF_CNAME_' + DatumType] = []
+ Dict['VARDEF_GUID_' + DatumType] = []
+ Dict['VARDEF_SKUID_' + DatumType] = []
+ Dict['VARDEF_VALUE_' + DatumType] = []
+ Dict['VARDEF_DB_VALUE_' + DatumType] = []
+ for Init in ['INIT', 'UNINIT']:
+ Dict[Init+'_CNAME_DECL_' + DatumType] = []
+ Dict[Init+'_GUID_DECL_' + DatumType] = []
+ Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
+ Dict[Init+'_VALUE_' + DatumType] = []
+ Dict[Init+'_DB_VALUE_'+DatumType] = []
+
+ for Type in ['STRING_HEAD', 'VPD_HEAD', 'VARIABLE_HEAD']:
+ Dict[Type + '_CNAME_DECL'] = []
+ Dict[Type + '_GUID_DECL'] = []
+ Dict[Type + '_NUMSKUS_DECL'] = []
+ Dict[Type + '_VALUE'] = []
+
+ Dict['STRING_DB_VALUE'] = []
+ Dict['VPD_DB_VALUE'] = []
+ Dict['VARIABLE_DB_VALUE'] = []
+
+ Dict['STRING_TABLE_INDEX'] = []
+ Dict['STRING_TABLE_LENGTH'] = []
+ Dict['STRING_TABLE_CNAME'] = []
+ Dict['STRING_TABLE_GUID'] = []
+ Dict['STRING_TABLE_VALUE'] = []
+ Dict['STRING_TABLE_DB_VALUE'] = []
+
+ Dict['SIZE_TABLE_CNAME'] = []
+ Dict['SIZE_TABLE_GUID'] = []
+ Dict['SIZE_TABLE_CURRENT_LENGTH'] = []
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'] = []
+
+ Dict['EXMAPPING_TABLE_EXTOKEN'] = []
+ Dict['EXMAPPING_TABLE_LOCAL_TOKEN'] = []
+ Dict['EXMAPPING_TABLE_GUID_INDEX'] = []
+
+ Dict['GUID_STRUCTURE'] = []
+ Dict['SKUID_VALUE'] = [0] # init Dict length
+ Dict['VARDEF_HEADER'] = []
+
+ Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = []
+ Dict['VARIABLE_DB_VALUE'] = []
+
+ Dict['PCD_TOKENSPACE'] = []
+ Dict['PCD_CNAME'] = []
+ Dict['PCD_TOKENSPACE_LENGTH'] = []
+ Dict['PCD_CNAME_LENGTH'] = []
+ Dict['PCD_TOKENSPACE_OFFSET'] = []
+ Dict['PCD_CNAME_OFFSET'] = []
+ Dict['PCD_TOKENSPACE_MAP'] = []
+ Dict['PCD_NAME_OFFSET'] = []
+
+ Dict['PCD_ORDER_TOKEN_NUMBER_MAP'] = {}
+ PCD_STRING_INDEX_MAP = {}
+
+ StringTableIndex = 0
+ StringTableSize = 0
+ NumberOfLocalTokens = 0
+ NumberOfPeiLocalTokens = 0
+ NumberOfDxeLocalTokens = 0
+ NumberOfExTokens = 0
+ NumberOfSizeItems = 0
+ NumberOfSkuEnabledPcd = 0
+ GuidList = []
+ VarCheckTab = VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER()
+ i = 0
+ ReorderedDynPcdList = GetOrderedDynamicPcdList(DynamicPcdList, Platform.PcdTokenNumber)
+ for item in ReorderedDynPcdList:
+ if item.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ item.DatumType = TAB_VOID
+ for Pcd in ReorderedDynPcdList:
+ VoidStarTypeCurrSize = []
+ i += 1
+ CName = Pcd.TokenCName
+ TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
+
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ CName = PcdItem[0]
+
+ EdkLogger.debug(EdkLogger.DEBUG_3, "PCD: %s %s (%s : %s)" % (CName, TokenSpaceGuidCName, Pcd.Phase, Phase))
+
+ if Pcd.Phase == 'PEI':
+ NumberOfPeiLocalTokens += 1
+ if Pcd.Phase == 'DXE':
+ NumberOfDxeLocalTokens += 1
+ if Pcd.Phase != Phase:
+ continue
+
+ #
+ # TODO: need GetGuidValue() definition
+ #
+ TokenSpaceGuidStructure = Pcd.TokenSpaceGuidValue
+ TokenSpaceGuid = GuidStructureStringToGuidValueName(TokenSpaceGuidStructure)
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if TokenSpaceGuid not in GuidList:
+ GuidList.append(TokenSpaceGuid)
+ Dict['GUID_STRUCTURE'].append(TokenSpaceGuidStructure)
+ NumberOfExTokens += 1
+
+ ValueList = []
+ DbValueList = []
+ StringHeadOffsetList = []
+ StringDbOffsetList = []
+ VpdHeadOffsetList = []
+ VpdDbOffsetList = []
+ VariableHeadValueList = []
+ VariableDbValueList = []
+ Pcd.InitString = 'UNINIT'
+
+ if Pcd.DatumType == TAB_VOID:
+ if Pcd.Type not in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ else:
+ Pcd.TokenTypeList = []
+ elif Pcd.DatumType == 'BOOLEAN':
+ Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8_BOOLEAN']
+ else:
+ Pcd.TokenTypeList = ['PCD_DATUM_TYPE_' + Pcd.DatumType]
+
+ if len(Pcd.SkuInfoList) > 1:
+ NumberOfSkuEnabledPcd += 1
+
+ SkuIdIndex = 1
+ VariableHeadList = []
+ for SkuName in Pcd.SkuInfoList:
+ Sku = Pcd.SkuInfoList[SkuName]
+ SkuId = Sku.SkuId
+ if SkuId is None or SkuId == '':
+ continue
+
+
+ SkuIdIndex += 1
+
+ if len(Sku.VariableName) > 0:
+ VariableGuidStructure = Sku.VariableGuidValue
+ VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
+ if Platform.Platform.VarCheckFlag:
+ var_check_obj = VAR_CHECK_PCD_VARIABLE_TAB(VariableGuidStructure, StringToArray(Sku.VariableName))
+ try:
+ var_check_obj.push_back(GetValidationObject(Pcd, Sku.VariableOffset))
+ VarAttr, _ = VariableAttributes.GetVarAttributes(Sku.VariableAttribute)
+ var_check_obj.SetAttributes(VarAttr)
+ var_check_obj.UpdateSize()
+ VarCheckTab.push_back(var_check_obj)
+ except Exception:
+ ValidInfo = ''
+ if Pcd.validateranges:
+ ValidInfo = Pcd.validateranges[0]
+ if Pcd.validlists:
+ ValidInfo = Pcd.validlists[0]
+ if ValidInfo:
+ EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
+ "The PCD '%s.%s' Validation information defined in DEC file has incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData = "[%s]" % str(ValidInfo))
+ else:
+ EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
+ "The PCD '%s.%s' Validation information defined in DEC file has incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ Pcd.TokenTypeList.append('PCD_TYPE_HII')
+ Pcd.InitString = 'INIT'
+ # Store all variable names of one HII PCD under different SKUs in the string
+ # table and calculate the VariableHeadStringIndex
+
+ VariableNameStructure = StringToArray(Sku.VariableName)
+
+ # Make the pointer of VariableName (HII PCD) 2-byte aligned
+ VariableNameStructureBytes = VariableNameStructure.lstrip("{").rstrip("}").split(",")
+ if len(VariableNameStructureBytes) % 2:
+ VariableNameStructure = "{%s,0x00}" % ",".join(VariableNameStructureBytes)
+
+ if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
+ Dict['STRING_TABLE_CNAME'].append(CName)
+ Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
+ if StringTableIndex == 0:
+ Dict['STRING_TABLE_INDEX'].append('')
+ else:
+ Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
+ VarNameSize = len(VariableNameStructure.replace(',', ' ').split())
+ Dict['STRING_TABLE_LENGTH'].append(VarNameSize )
+ Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
+ StringHeadOffsetList.append(str(StringTableSize) + 'U')
+ VarStringDbOffsetList = []
+ VarStringDbOffsetList.append(StringTableSize)
+ Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
+ StringTableIndex += 1
+ StringTableSize += len(VariableNameStructure.replace(',', ' ').split())
+ VariableHeadStringIndex = 0
+ for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
+ VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
+ VariableHeadList.append(VariableHeadStringIndex)
+
+ VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
+ # store VariableGuid to GuidTable and get the VariableHeadGuidIndex
+
+ if VariableGuid not in GuidList:
+ GuidList.append(VariableGuid)
+ Dict['GUID_STRUCTURE'].append(VariableGuidStructure)
+ VariableHeadGuidIndex = GuidList.index(VariableGuid)
+
+ if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
+ VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s), %dU, %sU' %
+ (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
+ VariableHeadGuidIndex, Sku.VariableOffset))
+ else:
+ VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s), %dU, %sU' %
+ (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
+ VariableHeadGuidIndex, Sku.VariableOffset))
+ Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
+ Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
+ Dict['VARDEF_SKUID_'+Pcd.DatumType].append(SkuIdIndex)
+ if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
+ Dict['VARDEF_VALUE_' + Pcd.DatumType].append("%s_%s[%d]" % (Pcd.TokenCName, TokenSpaceGuid, SkuIdIndex))
+ else:
+ #
+ # ULL (for UINT64) or U (for other integer types) should be appended to
+ # avoid warnings in the Linux build environment.
+ #
+ Dict['VARDEF_DB_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
+
+ if Pcd.DatumType == TAB_UINT64:
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
+ elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
+ elif Pcd.DatumType == "BOOLEAN":
+ if eval(Sku.HiiDefaultValue) in [1, 0]:
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(str(eval(Sku.HiiDefaultValue)) + "U")
+ else:
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
+
+ # construct the VariableHeader value
+ if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
+ VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s)' %
+ (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
+ Phase, CName, TokenSpaceGuid))
+ # the Pcd default value will be filled later on
+ VariableOffset = len(Dict['STRING_DB_VALUE'])
+ VariableRefTable = Dict['STRING_DB_VALUE']
+ else:
+ VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
+ (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
+ Phase, CName, TokenSpaceGuid, SkuIdIndex))
+ # the Pcd default value was filled before
+ VariableOffset = len(Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]) - 1
+ VariableRefTable = Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]
+ VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable, Sku.VariableAttribute])
+
+ elif Sku.VpdOffset != '':
+ Pcd.TokenTypeList.append('PCD_TYPE_VPD')
+ Pcd.InitString = 'INIT'
+ VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
+ VpdDbOffsetList.append(Sku.VpdOffset)
+ # Also add the VOID* string of VPD PCD to SizeTable
+ if Pcd.DatumType == TAB_VOID:
+ NumberOfSizeItems += 1
+ # For VPD type of PCD, its current size is equal to its MAX size.
+ VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
+ continue
+
+ if Pcd.DatumType == TAB_VOID:
+ Pcd.TokenTypeList.append('PCD_TYPE_STRING')
+ Pcd.InitString = 'INIT'
+ if Sku.HiiDefaultValue != '' and Sku.DefaultValue == '':
+ Sku.DefaultValue = Sku.HiiDefaultValue
+ if Sku.DefaultValue != '':
+ NumberOfSizeItems += 1
+ Dict['STRING_TABLE_CNAME'].append(CName)
+ Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
+
+ if StringTableIndex == 0:
+ Dict['STRING_TABLE_INDEX'].append('')
+ else:
+ Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
+ if Sku.DefaultValue[0] == 'L':
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(DefaultValueBinStructure.replace(',', ' ').split())
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
+ elif Sku.DefaultValue[0] == '"':
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(Sku.DefaultValue) - 2 + 1
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
+ elif Sku.DefaultValue[0] == '{':
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(Sku.DefaultValue.split(","))
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
+
+ StringHeadOffsetList.append(str(StringTableSize) + 'U')
+ StringDbOffsetList.append(StringTableSize)
+ if Pcd.MaxDatumSize != '':
+ MaxDatumSize = int(Pcd.MaxDatumSize, 0)
+ if MaxDatumSize < Size:
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Platform))
+ else:
+ MaxDatumSize = Size
+ else:
+ MaxDatumSize = Size
+ StringTabLen = MaxDatumSize
+ if StringTabLen % 2:
+ StringTabLen += 1
+ if Sku.VpdOffset == '':
+ VoidStarTypeCurrSize.append(str(Size) + 'U')
+ Dict['STRING_TABLE_LENGTH'].append(StringTabLen)
+ StringTableIndex += 1
+ StringTableSize += (StringTabLen)
+ else:
+ if "PCD_TYPE_HII" not in Pcd.TokenTypeList:
+ Pcd.TokenTypeList.append('PCD_TYPE_DATA')
+ if Sku.DefaultValue == 'TRUE':
+ Pcd.InitString = 'INIT'
+ else:
+ Pcd.InitString = Pcd.isinit
+ #
+                    # For UINT64 type PCD values, "ULL" should be appended to avoid
+                    # warnings under the Linux build environment.
+ #
+ if Pcd.DatumType == TAB_UINT64:
+ ValueList.append(Sku.DefaultValue + "ULL")
+ elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
+ ValueList.append(Sku.DefaultValue + "U")
+ elif Pcd.DatumType == "BOOLEAN":
+ if Sku.DefaultValue in ["1", "0"]:
+ ValueList.append(Sku.DefaultValue + "U")
+ else:
+ ValueList.append(Sku.DefaultValue)
+
+ DbValueList.append(Sku.DefaultValue)
+
+ Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
+ if Pcd.DatumType == TAB_VOID:
+ Dict['SIZE_TABLE_CNAME'].append(CName)
+ Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
+
+
+
+ if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+ Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
+ Dict['VARIABLE_HEAD_GUID_DECL'].append(TokenSpaceGuid)
+ Dict['VARIABLE_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
+ Dict['VARIABLE_HEAD_VALUE'].append('{ %s }\n' % ' },\n { '.join(VariableHeadValueList))
+ Dict['VARDEF_HEADER'].append('_Variable_Header')
+ Dict['VARIABLE_DB_VALUE'].append(VariableDbValueList)
+ else:
+ Dict['VARDEF_HEADER'].append('')
+ if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
+ Dict['VPD_HEAD_CNAME_DECL'].append(CName)
+ Dict['VPD_HEAD_GUID_DECL'].append(TokenSpaceGuid)
+ Dict['VPD_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
+ Dict['VPD_HEAD_VALUE'].append('{ %s }' % ' }, { '.join(VpdHeadOffsetList))
+ Dict['VPD_DB_VALUE'].append(VpdDbOffsetList)
+ if 'PCD_TYPE_STRING' in Pcd.TokenTypeList:
+ Dict['STRING_HEAD_CNAME_DECL'].append(CName)
+ Dict['STRING_HEAD_GUID_DECL'].append(TokenSpaceGuid)
+ Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
+ Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
+ Dict['STRING_DB_VALUE'].append(StringDbOffsetList)
+            PCD_STRING_INDEX_MAP[len(Dict['STRING_HEAD_CNAME_DECL']) - 1] = len(Dict['STRING_DB_VALUE']) - 1
+ if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
+ Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
+ Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
+ Dict[Pcd.InitString+'_NUMSKUS_DECL_'+Pcd.DatumType].append(len(Pcd.SkuInfoList))
+ if Pcd.InitString == 'UNINIT':
+ Dict['PCD_DATABASE_UNINIT_EMPTY'] = ''
+ else:
+ Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))
+ Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType].append(DbValueList)
+
+ if Phase == 'PEI':
+ NumberOfLocalTokens = NumberOfPeiLocalTokens
+ if Phase == 'DXE':
+ NumberOfLocalTokens = NumberOfDxeLocalTokens
+
+ Dict['TOKEN_INIT'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['TOKEN_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['TOKEN_GUID'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_CNAME_LENGTH'] = [0 for x in range(NumberOfLocalTokens)]
+ SkuEnablePcdIndex = 0
+ for Pcd in ReorderedDynPcdList:
+ CName = Pcd.TokenCName
+ TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
+ if Pcd.Phase != Phase:
+ continue
+
+ TokenSpaceGuid = GuidStructureStringToGuidValueName(Pcd.TokenSpaceGuidValue) #(Platform.PackageList, TokenSpaceGuidCName))
+ GeneratedTokenNumber = Platform.PcdTokenNumber[CName, TokenSpaceGuidCName] - 1
+ if Phase == 'DXE':
+ GeneratedTokenNumber -= NumberOfPeiLocalTokens
+
+ if len(Pcd.SkuInfoList) > 1:
+ Dict['PCD_ORDER_TOKEN_NUMBER_MAP'][GeneratedTokenNumber] = SkuEnablePcdIndex
+ SkuEnablePcdIndex += 1
+
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ CName = PcdItem[0]
+
+ EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
+ EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
+ EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))
+
+ #
+        # The following four Dict items hold the information for the LocalTokenNumberTable
+ #
+ Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Init'
+ if Pcd.InitString == 'UNINIT':
+ Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Uninit'
+
+ Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
+ Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
+ Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
+
+ if Platform.Platform.PcdInfoFlag:
+ TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
+ if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
+ Dict['PCD_TOKENSPACE'].append(TokenSpaceGuidCNameArray)
+ Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCNameArray.split(",")) )
+ Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
+ CNameBinArray = StringToArray('"' + CName + '"' )
+ Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
+
+ Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
+
+
+ Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
+
+ # search the Offset and Table, used by LocalTokenNumberTableOffset
+ if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+ # Find index by CName, TokenSpaceGuid
+ Offset = GetMatchedIndex(CName, Dict['VARIABLE_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VARIABLE_HEAD_GUID_DECL'])
+ assert(Offset != -1)
+ Table = Dict['VARIABLE_DB_VALUE']
+ if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
+ Offset = GetMatchedIndex(CName, Dict['VPD_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VPD_HEAD_GUID_DECL'])
+ assert(Offset != -1)
+ Table = Dict['VPD_DB_VALUE']
+ if 'PCD_TYPE_STRING' in Pcd.TokenTypeList and 'PCD_TYPE_HII' not in Pcd.TokenTypeList:
+ # Find index by CName, TokenSpaceGuid
+ Offset = GetMatchedIndex(CName, Dict['STRING_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['STRING_HEAD_GUID_DECL'])
+ Offset = PCD_STRING_INDEX_MAP[Offset]
+ assert(Offset != -1)
+ Table = Dict['STRING_DB_VALUE']
+ if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
+ # need to store whether it is in init table or not
+ Offset = GetMatchedIndex(CName, Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType], TokenSpaceGuid, Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType])
+ assert(Offset != -1)
+ if Pcd.InitString == 'UNINIT':
+ Table = Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType]
+ else:
+ Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
+ Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'][GeneratedTokenNumber] = (Offset, Table)
+
+ #
+ # Update VARDEF_HEADER
+ #
+ if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+ Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
+ else:
+ Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
+
+
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+
+ if Phase == 'DXE':
+ GeneratedTokenNumber += NumberOfPeiLocalTokens
+ #
+            # Per the PCD architecture specification, PCD Token Numbers are 1-based,
+            # and 0 is defined as an invalid token number.
+            # A PCD Token Number is assigned to each EX type PCD. When the PCD
+            # Driver/PEIM maps EX_GUID and EX_TOKEN_NUMBER to the PCD Token Number,
+            # the non-EX Protocol/PPI interface can be called to get/set the value.
+            # This assumption is made by the PCD Driver/PEIM in MdeModulePkg.
+            # Therefore, 1 is added to GeneratedTokenNumber to form a PCD Token Number
+            # before it is inserted into the EXMAPPING_TABLE.
+ #
+
+
+ Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
+ Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
+ Dict['EXMAPPING_TABLE_GUID_INDEX'].append(str(GuidList.index(TokenSpaceGuid)) + 'U')
+
+ if Platform.Platform.PcdInfoFlag:
+ for index in range(len(Dict['PCD_TOKENSPACE_MAP'])):
+ TokenSpaceIndex = StringTableSize
+ for i in range(Dict['PCD_TOKENSPACE_MAP'][index]):
+ TokenSpaceIndex += Dict['PCD_TOKENSPACE_LENGTH'][i]
+ Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
+ for index in range(len(Dict['PCD_TOKENSPACE'])):
+ StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
+ StringTableIndex += 1
+ for index in range(len(Dict['PCD_CNAME'])):
+ Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
+ Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
+ Dict['PCD_NAME_OFFSET'].append(StringTableSize)
+ StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
+ StringTableIndex += 1
+ if GuidList != []:
+ Dict['GUID_TABLE_EMPTY'] = 'FALSE'
+ Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
+ else:
+ Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')]
+
+ if StringTableIndex == 0:
+ Dict['STRING_TABLE_INDEX'].append('')
+ Dict['STRING_TABLE_LENGTH'].append(1)
+ Dict['STRING_TABLE_CNAME'].append('')
+ Dict['STRING_TABLE_GUID'].append('')
+ Dict['STRING_TABLE_VALUE'].append('{ 0 }')
+ else:
+ Dict['STRING_TABLE_EMPTY'] = 'FALSE'
+ Dict['STRING_TABLE_SIZE'] = str(StringTableSize) + 'U'
+
+ if Dict['SIZE_TABLE_CNAME'] == []:
+ Dict['SIZE_TABLE_CNAME'].append('')
+ Dict['SIZE_TABLE_GUID'].append('')
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(['0U'])
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
+
+ if NumberOfLocalTokens != 0:
+ Dict['DATABASE_EMPTY'] = 'FALSE'
+ Dict['LOCAL_TOKEN_NUMBER_TABLE_SIZE'] = NumberOfLocalTokens
+ Dict['LOCAL_TOKEN_NUMBER'] = NumberOfLocalTokens
+
+ if NumberOfExTokens != 0:
+ Dict['EXMAP_TABLE_EMPTY'] = 'FALSE'
+ Dict['EXMAPPING_TABLE_SIZE'] = str(NumberOfExTokens) + 'U'
+ Dict['EX_TOKEN_NUMBER'] = str(NumberOfExTokens) + 'U'
+ else:
+ Dict['EXMAPPING_TABLE_EXTOKEN'].append('0U')
+ Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append('0U')
+ Dict['EXMAPPING_TABLE_GUID_INDEX'].append('0U')
+
+ if NumberOfSizeItems != 0:
+ Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
+
+ if NumberOfSkuEnabledPcd != 0:
+ Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
+
+ for AvailableSkuNumber in SkuObj.SkuIdNumberSet:
+ if AvailableSkuNumber not in Dict['SKUID_VALUE']:
+ Dict['SKUID_VALUE'].append(AvailableSkuNumber)
+ Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
+
+ AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
+ if NumberOfLocalTokens == 0:
+ AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
+ else:
+ #
+        # Update the Size Table to the right order; it should be the same as the LocalTokenNumberTable
+ #
+ SizeCNameTempList = []
+ SizeGuidTempList = []
+ SizeCurLenTempList = []
+ SizeMaxLenTempList = []
+ ReOrderFlag = True
+
+ if len(Dict['SIZE_TABLE_CNAME']) == 1:
+ if not (Dict['SIZE_TABLE_CNAME'][0] and Dict['SIZE_TABLE_GUID'][0]):
+ ReOrderFlag = False
+
+ if ReOrderFlag:
+ for Count in range(len(Dict['TOKEN_CNAME'])):
+ for Count1 in range(len(Dict['SIZE_TABLE_CNAME'])):
+ if Dict['TOKEN_CNAME'][Count] == Dict['SIZE_TABLE_CNAME'][Count1] and \
+ Dict['TOKEN_GUID'][Count] == Dict['SIZE_TABLE_GUID'][Count1]:
+ SizeCNameTempList.append(Dict['SIZE_TABLE_CNAME'][Count1])
+ SizeGuidTempList.append(Dict['SIZE_TABLE_GUID'][Count1])
+ SizeCurLenTempList.append(Dict['SIZE_TABLE_CURRENT_LENGTH'][Count1])
+ SizeMaxLenTempList.append(Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count1])
+
+ for Count in range(len(Dict['SIZE_TABLE_CNAME'])):
+ Dict['SIZE_TABLE_CNAME'][Count] = SizeCNameTempList[Count]
+ Dict['SIZE_TABLE_GUID'][Count] = SizeGuidTempList[Count]
+ Dict['SIZE_TABLE_CURRENT_LENGTH'][Count] = SizeCurLenTempList[Count]
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count] = SizeMaxLenTempList[Count]
+
+ AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))
+
+
+# print Phase
+ Buffer = BuildExDataBase(Dict)
+ return AutoGenH, AutoGenC, Buffer, VarCheckTab
+
+def GetOrderedDynamicPcdList(DynamicPcdList, PcdTokenNumberList):
+ ReorderedDyPcdList = [None for i in range(len(DynamicPcdList))]
+ for Pcd in DynamicPcdList:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in PcdTokenNumberList:
+ ReorderedDyPcdList[PcdTokenNumberList[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]-1] = Pcd
+ return ReorderedDyPcdList
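+# A minimal usage sketch (names are hypothetical): given
+#   PcdTokenNumberList = {("PcdA", "GuidA"): 2, ("PcdB", "GuidB"): 1}
+# the 1-based token numbers place PcdB at index 0 and PcdA at index 1 of the
+# returned list.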
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py
new file mode 100755
index 00000000..84cf0fc9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py
@@ -0,0 +1,366 @@
+# Copyright (c) 2017 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+#
+# This file is used to collect the Variable checking information
+#
+
+# #
+# Import Modules
+#
+from struct import pack, unpack
+import collections
+import copy
+from Common.VariableAttributes import VariableAttributes
+from Common.Misc import *
+import Common.DataType as DataType
+
+var_info = collections.namedtuple("uefi_var", "pcdindex,pcdname,defaultstoragename,skuname,var_name, var_guid, var_offset,var_attribute,pcd_default_value, default_value, data_type,PcdDscLine,StructurePcd")
+NvStorageHeaderSize = 28
+VariableHeaderSize = 32
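+# NvStorageHeaderSize matches the variable store header packed below:
+# 16-byte GUID + 4-byte size + 1-byte format + 1-byte state + 6 reserved bytes.
+# VariableHeaderSize matches PACK_VARIABLE_HEADER: 2-byte StartId + 1-byte state
+# + 1 reserved byte + three 4-byte fields + a 16-byte vendor GUID.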
+
+class VariableMgr(object):
+ def __init__(self, DefaultStoreMap, SkuIdMap):
+ self.VarInfo = []
+ self.DefaultStoreMap = DefaultStoreMap
+ self.SkuIdMap = SkuIdMap
+ self.VpdRegionSize = 0
+ self.VpdRegionOffset = 0
+ self.NVHeaderBuff = None
+ self.VarDefaultBuff = None
+ self.VarDeltaBuff = None
+
+ def append_variable(self, uefi_var):
+ self.VarInfo.append(uefi_var)
+
+ def SetVpdRegionMaxSize(self, maxsize):
+ self.VpdRegionSize = maxsize
+
+ def SetVpdRegionOffset(self, vpdoffset):
+ self.VpdRegionOffset = vpdoffset
+
+ def PatchNVStoreDefaultMaxSize(self, maxsize):
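+        # Patch the 8-byte max-size field (bytes 8..15) of the NSDB header in
+        # place, then re-emit the whole buffer as a C-style initializer string
+        # such as "{0x4e,0x53,0x44,0x42,...}" (byte values are illustrative).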
+ if not self.NVHeaderBuff:
+ return ""
+ self.NVHeaderBuff = self.NVHeaderBuff[:8] + pack("=Q", maxsize)
+ default_var_bin = VariableMgr.format_data(self.NVHeaderBuff + self.VarDefaultBuff + self.VarDeltaBuff)
+ value_str = "{"
+ default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin]
+ value_str += ",".join(default_var_bin_strip)
+ value_str += "}"
+ return value_str
+
+ def combine_variable(self):
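+        # Merge all settings that share the same (sku, default store, variable
+        # name, variable GUID) key into one byte array: each member's value is
+        # laid down at its variable offset (entries from later DSC lines
+        # overwrite earlier bytes), and the combined value is stored back as a
+        # single VOID* entry at offset 0x00.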
+ indexedvarinfo = collections.OrderedDict()
+ for item in self.VarInfo:
+ if (item.skuname, item.defaultstoragename, item.var_name, item.var_guid) not in indexedvarinfo:
+ indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid) ] = []
+ indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid)].append(item)
+ for key in indexedvarinfo:
+ sku_var_info_offset_list = indexedvarinfo[key]
+ sku_var_info_offset_list.sort(key=lambda x:x.PcdDscLine)
+ FirstOffset = int(sku_var_info_offset_list[0].var_offset, 16) if sku_var_info_offset_list[0].var_offset.upper().startswith("0X") else int(sku_var_info_offset_list[0].var_offset)
+            firstvalue_list = sku_var_info_offset_list[0].default_value.strip("{").strip("}").split(",")
+            firstdata_type = sku_var_info_offset_list[0].data_type
+            if firstdata_type in DataType.TAB_PCD_NUMERIC_TYPES:
+                firstdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]]
+                firstdata = firstvalue_list[0]
+                firstvalue_list = []
+                pack_data = pack(firstdata_flag, int(firstdata, 0))
+                for data_byte in range(len(pack_data)):
+                    firstvalue_list.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
+            newvalue_list = ["0x00"] * FirstOffset + firstvalue_list
+
+ for var_item in sku_var_info_offset_list[1:]:
+ CurOffset = int(var_item.var_offset, 16) if var_item.var_offset.upper().startswith("0X") else int(var_item.var_offset)
+ CurvalueList = var_item.default_value.strip("{").strip("}").split(",")
+ Curdata_type = var_item.data_type
+ if Curdata_type in DataType.TAB_PCD_NUMERIC_TYPES:
+ data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]]
+ data = CurvalueList[0]
+ CurvalueList = []
+ pack_data = pack(data_flag, int(data, 0))
+ for data_byte in range(len(pack_data)):
+ CurvalueList.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
+ if CurOffset > len(newvalue_list):
+ newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList
+ else:
+ newvalue_list[CurOffset : CurOffset + len(CurvalueList)] = CurvalueList
+
+ newvaluestr = "{" + ",".join(newvalue_list) +"}"
+ n = sku_var_info_offset_list[0]
+ indexedvarinfo[key] = [var_info(n.pcdindex, n.pcdname, n.defaultstoragename, n.skuname, n.var_name, n.var_guid, "0x00", n.var_attribute, newvaluestr, newvaluestr, DataType.TAB_VOID,n.PcdDscLine,n.StructurePcd)]
+ self.VarInfo = [item[0] for item in list(indexedvarinfo.values())]
+
+ def process_variable_data(self):
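+        # Build, per (sku, default store) pair, the packed data for each PCD:
+        # the (DEFAULT, STANDARD) pair keeps its full value buffer, while every
+        # other pair is reduced to a byte-level delta against that default.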
+
+ var_data = collections.defaultdict(collections.OrderedDict)
+
+ indexedvarinfo = collections.OrderedDict()
+ for item in self.VarInfo:
+ if item.pcdindex not in indexedvarinfo:
+ indexedvarinfo[item.pcdindex] = dict()
+ indexedvarinfo[item.pcdindex][(item.skuname, item.defaultstoragename)] = item
+
+ for index in indexedvarinfo:
+ sku_var_info = indexedvarinfo[index]
+
+ default_data_buffer = ""
+ others_data_buffer = ""
+ tail = None
+ default_sku_default = indexedvarinfo[index].get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT))
+
+ if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
+ var_max_len = max(len(var_item.default_value.split(",")) for var_item in sku_var_info.values())
+ if len(default_sku_default.default_value.split(",")) < var_max_len:
+ tail = ",".join("0x00" for i in range(var_max_len-len(default_sku_default.default_value.split(","))))
+
+ default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)
+
+ default_data_array = ()
+ for item in range(len(default_data_buffer)):
+ default_data_array += unpack("B", default_data_buffer[item:item + 1])
+
+ var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
+
+ for (skuid, defaultstoragename) in indexedvarinfo[index]:
+ tail = None
+ if (skuid, defaultstoragename) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
+ continue
+ other_sku_other = indexedvarinfo[index][(skuid, defaultstoragename)]
+
+ if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
+ if len(other_sku_other.default_value.split(",")) < var_max_len:
+ tail = ",".join("0x00" for i in range(var_max_len-len(other_sku_other.default_value.split(","))))
+
+ others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)
+
+ others_data_array = ()
+ for item in range(len(others_data_buffer)):
+ others_data_array += unpack("B", others_data_buffer[item:item + 1])
+
+ data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
+
+ var_data[(skuid, defaultstoragename)][index] = (data_delta, sku_var_info[(skuid, defaultstoragename)])
+ return var_data
+
+ def new_process_varinfo(self):
+ self.combine_variable()
+
+ var_data = self.process_variable_data()
+
+ if not var_data:
+ return []
+
+ pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
+ NvStoreDataBuffer = bytearray()
+ var_data_offset = collections.OrderedDict()
+ offset = NvStorageHeaderSize
+ for default_data, default_info in pcds_default_data.values():
+ var_name_buffer = VariableMgr.PACK_VARIABLE_NAME(default_info.var_name)
+
+ vendorguid = default_info.var_guid.split('-')
+
+ if default_info.var_attribute:
+ var_attr_value, _ = VariableAttributes.GetVarAttributes(default_info.var_attribute)
+ else:
+ var_attr_value = 0x07
+
+ DataBuffer = VariableMgr.AlignData(var_name_buffer + default_data)
+
+ data_size = len(DataBuffer)
+ offset += VariableHeaderSize + len(default_info.var_name.split(","))
+ var_data_offset[default_info.pcdindex] = offset
+ offset += data_size - len(default_info.var_name.split(","))
+
+ var_header_buffer = VariableMgr.PACK_VARIABLE_HEADER(var_attr_value, len(default_info.var_name.split(",")), len (default_data), vendorguid)
+ NvStoreDataBuffer += (var_header_buffer + DataBuffer)
+
+ variable_storage_header_buffer = VariableMgr.PACK_VARIABLE_STORE_HEADER(len(NvStoreDataBuffer) + 28)
+
+ nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
+
+ data_delta_structure_buffer = bytearray()
+ for skuname, defaultstore in var_data:
+ if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
+ continue
+ pcds_sku_data = var_data[(skuname, defaultstore)]
+ delta_data_set = []
+ for pcdindex in pcds_sku_data:
+ offset = var_data_offset[pcdindex]
+ delta_data, _ = pcds_sku_data[pcdindex]
+ delta_data = [(item[0] + offset, item[1]) for item in delta_data]
+ delta_data_set.extend(delta_data)
+
+ data_delta_structure_buffer += VariableMgr.AlignData(self.PACK_DELTA_DATA(skuname, defaultstore, delta_data_set), 8)
+
+ size = len(nv_default_part + data_delta_structure_buffer) + 16
+ maxsize = self.VpdRegionSize if self.VpdRegionSize else size
+ NV_Store_Default_Header = VariableMgr.PACK_NV_STORE_DEFAULT_HEADER(size, maxsize)
+
+ self.NVHeaderBuff = NV_Store_Default_Header
+        self.VarDefaultBuff = nv_default_part
+ self.VarDeltaBuff = data_delta_structure_buffer
+ return VariableMgr.format_data(NV_Store_Default_Header + nv_default_part + data_delta_structure_buffer)
+
+
+ @staticmethod
+ def format_data(data):
+ return [hex(item) for item in VariableMgr.unpack_data(data)]
+
+ @staticmethod
+ def unpack_data(data):
+ final_data = ()
+ for item in range(len(data)):
+ final_data += unpack("B", data[item:item + 1])
+ return final_data
+
+ @staticmethod
+ def calculate_delta(default, theother):
+ if len(default) - len(theother) != 0:
+ EdkLogger.error("build", FORMAT_INVALID, 'The variable data length is not the same for the same PCD.')
+ data_delta = []
+ for i in range(len(default)):
+ if default[i] != theother[i]:
+ data_delta.append((i, theother[i]))
+ return data_delta
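+    # For example, calculate_delta((1, 2, 3), (1, 9, 3)) returns [(1, 9)]:
+    # only the positions whose bytes differ from the default are recorded.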
+
+ def dump(self):
+
+ default_var_bin = self.new_process_varinfo()
+ if default_var_bin:
+ value_str = "{"
+ default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin]
+ value_str += ",".join(default_var_bin_strip)
+ value_str += "}"
+ return value_str
+ return ""
+
+ @staticmethod
+ def PACK_VARIABLE_STORE_HEADER(size):
+ #Signature: gEfiVariableGuid
+ Guid = "{ 0xddcf3616, 0x3275, 0x4164, { 0x98, 0xb6, 0xfe, 0x85, 0x70, 0x7f, 0xfe, 0x7d }}"
+ Guid = GuidStructureStringToGuidString(Guid)
+ GuidBuffer = PackGUID(Guid.split('-'))
+
+ SizeBuffer = pack('=L', size)
+ FormatBuffer = pack('=B', 0x5A)
+ StateBuffer = pack('=B', 0xFE)
+ reservedBuffer = pack('=H', 0)
+ reservedBuffer += pack('=L', 0)
+
+ return GuidBuffer + SizeBuffer + FormatBuffer + StateBuffer + reservedBuffer
+
+ @staticmethod
+ def PACK_NV_STORE_DEFAULT_HEADER(size, maxsize):
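+        # The four signature bytes below spell "NSDB".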
+ Signature = pack('=B', ord('N'))
+ Signature += pack("=B", ord('S'))
+ Signature += pack("=B", ord('D'))
+ Signature += pack("=B", ord('B'))
+
+ SizeBuffer = pack("=L", size)
+ MaxSizeBuffer = pack("=Q", maxsize)
+
+ return Signature + SizeBuffer + MaxSizeBuffer
+
+ @staticmethod
+ def PACK_VARIABLE_HEADER(attribute, namesize, datasize, vendorguid):
+
+ Buffer = pack('=H', 0x55AA) # pack StartID
+ Buffer += pack('=B', 0x3F) # pack State
+ Buffer += pack('=B', 0) # pack reserved
+
+ Buffer += pack('=L', attribute)
+ Buffer += pack('=L', namesize)
+ Buffer += pack('=L', datasize)
+
+ Buffer += PackGUID(vendorguid)
+
+ return Buffer
+
+ @staticmethod
+    def PACK_VARIABLES_DATA(var_value, data_type, tail=None):
+ Buffer = bytearray()
+ data_len = 0
+ if data_type == DataType.TAB_VOID:
+ for value_char in var_value.strip("{").strip("}").split(","):
+ Buffer += pack("=B", int(value_char, 16))
+ data_len += len(var_value.split(","))
+ if tail:
+ for value_char in tail.split(","):
+ Buffer += pack("=B", int(value_char, 16))
+ data_len += len(tail.split(","))
+ elif data_type == "BOOLEAN":
+ Buffer += pack("=B", True) if var_value.upper() in ["TRUE","1"] else pack("=B", False)
+ data_len += 1
+ elif data_type == DataType.TAB_UINT8:
+ Buffer += pack("=B", GetIntegerValue(var_value))
+ data_len += 1
+ elif data_type == DataType.TAB_UINT16:
+ Buffer += pack("=H", GetIntegerValue(var_value))
+ data_len += 2
+ elif data_type == DataType.TAB_UINT32:
+ Buffer += pack("=L", GetIntegerValue(var_value))
+ data_len += 4
+ elif data_type == DataType.TAB_UINT64:
+ Buffer += pack("=Q", GetIntegerValue(var_value))
+ data_len += 8
+
+ return Buffer
+
+ @staticmethod
+ def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
+ Buffer = bytearray()
+ Buffer += pack("=L", 4+8+8)
+ Buffer += pack("=Q", int(skuid))
+ Buffer += pack("=Q", int(defaultstoragename))
+
+ for item in var_value:
+ Buffer += pack("=B", item)
+
+ Buffer = pack("=L", len(Buffer)+4) + Buffer
+
+ return Buffer
+
+ def GetSkuId(self, skuname):
+ if skuname not in self.SkuIdMap:
+ return None
+ return self.SkuIdMap.get(skuname)[0]
+
+ def GetDefaultStoreId(self, dname):
+ if dname not in self.DefaultStoreMap:
+ return None
+ return self.DefaultStoreMap.get(dname)[0]
+
+ def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
+ skuid = self.GetSkuId(skuname)
+ defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
+ Buffer = bytearray()
+ Buffer += pack("=L", 4+8+8)
+ Buffer += pack("=Q", int(skuid))
+ Buffer += pack("=Q", int(defaultstorageid))
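+        # Each delta entry occupies 4 bytes: a little-endian offset whose most
+        # significant byte is overwritten by the 1-byte data value, i.e. a
+        # 24-bit offset plus 8 bits of data per entry.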
+ for (delta_offset, value) in delta_list:
+ Buffer += pack("=L", delta_offset)
+ Buffer = Buffer[:-1] + pack("=B", value)
+
+ Buffer = pack("=L", len(Buffer) + 4) + Buffer
+
+ return Buffer
+
+ @staticmethod
+ def AlignData(data, align = 4):
+ mybuffer = data
+ if (len(data) % align) > 0:
+ for i in range(align - (len(data) % align)):
+ mybuffer += pack("=B", 0)
+
+ return mybuffer
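+    # For example, AlignData(b'\x01\x02\x03', 4) returns b'\x01\x02\x03\x00';
+    # the data is zero-padded up to the requested alignment.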
+
+ @staticmethod
+ def PACK_VARIABLE_NAME(var_name):
+ Buffer = bytearray()
+ for name_char in var_name.strip("{").strip("}").split(","):
+ Buffer += pack("=B", int(name_char, 16))
+
+ return Buffer
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py
new file mode 100755
index 00000000..350c76bd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py
@@ -0,0 +1,132 @@
+## @file
+# This file is used to collect all defined strings in Image Definition files
+#
+# Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.StringUtils import GetLineNo
+from Common.Misc import PathClass
+from Common.LongFilePathSupport import LongFilePath
+import re
+import os
+from Common.GlobalData import gIdentifierPattern
+from .UniClassObject import StripComments
+
+IMAGE_TOKEN = re.compile(r'IMAGE_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
+
+#
+# Value of different image information block types
+#
+EFI_HII_IIBT_END = 0x00
+EFI_HII_IIBT_IMAGE_1BIT = 0x10
+EFI_HII_IIBT_IMAGE_1BIT_TRANS = 0x11
+EFI_HII_IIBT_IMAGE_4BIT = 0x12
+EFI_HII_IIBT_IMAGE_4BIT_TRANS = 0x13
+EFI_HII_IIBT_IMAGE_8BIT = 0x14
+EFI_HII_IIBT_IMAGE_8BIT_TRANS = 0x15
+EFI_HII_IIBT_IMAGE_24BIT = 0x16
+EFI_HII_IIBT_IMAGE_24BIT_TRANS = 0x17
+EFI_HII_IIBT_IMAGE_JPEG = 0x18
+EFI_HII_IIBT_IMAGE_PNG = 0x19
+EFI_HII_IIBT_DUPLICATE = 0x20
+EFI_HII_IIBT_SKIP2 = 0x21
+EFI_HII_IIBT_SKIP1 = 0x22
+EFI_HII_IIBT_EXT1 = 0x30
+EFI_HII_IIBT_EXT2 = 0x31
+EFI_HII_IIBT_EXT4 = 0x32
+
+#
+# Value of HII package type
+#
+EFI_HII_PACKAGE_TYPE_ALL = 0x00
+EFI_HII_PACKAGE_TYPE_GUID = 0x01
+EFI_HII_PACKAGE_FORMS = 0x02
+EFI_HII_PACKAGE_STRINGS = 0x04
+EFI_HII_PACKAGE_FONTS = 0x05
+EFI_HII_PACKAGE_IMAGES = 0x06
+EFI_HII_PACKAGE_SIMPLE_FONTS = 0x07
+EFI_HII_PACKAGE_DEVICE_PATH = 0x08
+EFI_HII_PACKAGE_KEYBOARD_LAYOUT = 0x09
+EFI_HII_PACKAGE_ANIMATIONS = 0x0A
+EFI_HII_PACKAGE_END = 0xDF
+EFI_HII_PACKAGE_TYPE_SYSTEM_BEGIN = 0xE0
+EFI_HII_PACKAGE_TYPE_SYSTEM_END = 0xFF
+
+class IdfFileClassObject(object):
+ def __init__(self, FileList = []):
+ self.ImageFilesDict = {}
+ self.ImageIDList = []
+ for File in FileList:
+ if File is None:
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
+
+ try:
+ IdfFile = open(LongFilePath(File.Path), mode='r')
+ FileIn = IdfFile.read()
+ IdfFile.close()
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
+
+ ImageFileList = []
+ for Line in FileIn.splitlines():
+ Line = Line.strip()
+ Line = StripComments(Line)
+ if len(Line) == 0:
+ continue
+
+ LineNo = GetLineNo(FileIn, Line, False)
+ if not Line.startswith('#image '):
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is invalid.' % (Line, LineNo, File.Path))
+
+ if Line.find('#image ') >= 0:
+ LineDetails = Line.split()
+ Len = len(LineDetails)
+ if Len != 3 and Len != 4:
+                    EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The format does not match "#image IMAGE_ID [TRANSPARENT] ImageFileName" in Line %s of File %s.' % (LineNo, File.Path))
+ if Len == 4 and LineDetails[2] != 'TRANSPARENT':
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
+ MatchString = gIdentifierPattern.match(LineDetails[1])
+ if MatchString is None:
+                EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains an invalid character.' % (LineDetails[1], File.Path))
+ if LineDetails[1] not in self.ImageIDList:
+ self.ImageIDList.append(LineDetails[1])
+ else:
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is already defined.' % (LineDetails[1], LineNo, File.Path))
+ if Len == 4:
+ ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], True)
+ else:
+ ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], False)
+ ImageFileList.append(ImageFile)
+ if ImageFileList:
+ self.ImageFilesDict[File] = ImageFileList
+
+def SearchImageID(ImageFileObject, FileList):
+ if FileList == []:
+ return ImageFileObject
+
+ for File in FileList:
+ if os.path.isfile(File):
+            with open(File, 'r') as Lines:
+                for Line in Lines:
+                    ImageIdList = IMAGE_TOKEN.findall(Line)
+                    for ID in ImageIdList:
+                        EdkLogger.debug(EdkLogger.DEBUG_5, "Found ImageID identifier: " + ID)
+                        ImageFileObject.SetImageIDReferenced(ID)
+
+class ImageFileObject(object):
+ def __init__(self, FileName, ImageID, TransParent = False):
+ self.FileName = FileName
+ self.File = ''
+ self.ImageID = ImageID
+ self.TransParent = TransParent
+ self.Referenced = False
+
+ def SetImageIDReferenced(self, ImageID):
+ if ImageID == self.ImageID:
+ self.Referenced = True
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py
new file mode 100755
index 00000000..29169f67
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py
@@ -0,0 +1,304 @@
+## @file
+# Build cache intermediate result and state
+#
+# Copyright (c) 2019 - 2020, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from Common.caching import cached_property
+import Common.EdkLogger as EdkLogger
+import Common.LongFilePathOs as os
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange, PathClass
+from Common.Misc import TemplateString
+import sys
+gIsFileMap = {}
+
+DEP_FILE_TAIL = "# Updated \n"
+
+class IncludesAutoGen():
+    """ This class manages the dependency files which are used in the Makefile to support incremental builds.
+         1. C files:
+            1. MSVC:
+               cl.exe has a build option, /showIncludes, to display include files on stdout. The build tool
+               captures those messages and generates the dependency (.deps) files.
+            2. CLANG and GCC:
+               The -MMD -MF build options make the compiler generate the dependency files. The build tool
+               then updates the .deps files.
+         2. ASL files:
+            1. Trim finds all the files included with the ASL-specific include format and generates a .trim.deps file.
+            2. The ASL preprocessor uses the C preprocessor to find all files included with the #include format and generates a .deps file.
+            3. The build tool updates the .deps file.
+         3. ASM files (.asm, .s or .nasm):
+            1. Trim finds all the files included with the ASL-specific include format and generates a .trim.deps file.
+            2. The ASM preprocessor uses the C preprocessor to find all files included with the #include format and generates a .deps file.
+            3. The build tool updates the .deps file.
+    """
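+    # A minimal sketch of an updated .deps file (paths are hypothetical); the
+    # first line names the final target, the rest list its dependencies, and
+    # DEP_FILE_TAIL marks the file as already processed:
+    #   /build/Module/OUTPUT/Foo.obj:  \
+    #   /ws/MdePkg/Include/Uefi.h \
+    #   /ws/Module/Foo.h
+    #
+    #   # Updated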
+ def __init__(self, makefile_folder, ModuleAuto):
+ self.d_folder = makefile_folder
+ self.makefile_folder = makefile_folder
+ self.module_autogen = ModuleAuto
+ self.ToolChainFamily = ModuleAuto.ToolChainFamily
+ self.workspace = ModuleAuto.WorkspaceDir
+
+ def CreateModuleDeps(self):
+ SaveFileOnChange(os.path.join(self.makefile_folder,"deps.txt"),"\n".join(self.DepsCollection),False)
+
+ def CreateDepsInclude(self):
+ deps_file = {'deps_file':self.deps_files}
+
+ MakePath = self.module_autogen.BuildOption.get('MAKE', {}).get('PATH')
+ if not MakePath:
+ EdkLogger.error("build", PARAMETER_MISSING, Message="No Make path available.")
+ elif "nmake" in MakePath:
+ _INCLUDE_DEPS_TEMPLATE = TemplateString('''
+${BEGIN}
+!IF EXIST(${deps_file})
+!INCLUDE ${deps_file}
+!ENDIF
+${END}
+ ''')
+ else:
+ _INCLUDE_DEPS_TEMPLATE = TemplateString('''
+${BEGIN}
+-include ${deps_file}
+${END}
+ ''')
+
+ try:
+ deps_include_str = _INCLUDE_DEPS_TEMPLATE.Replace(deps_file)
+ except Exception as e:
+ print(e)
+ SaveFileOnChange(os.path.join(self.makefile_folder,"dependency"),deps_include_str,False)
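+        # On a GNU make based toolchain the generated "dependency" file holds
+        # one line per .deps file, e.g. (hypothetical path):
+        #   -include /build/Module/OUTPUT/Foo.c.deps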
+
+ def CreateDepsTarget(self):
+ SaveFileOnChange(os.path.join(self.makefile_folder,"deps_target"),"\n".join([item +":" for item in self.DepsCollection]),False)
+
+ @cached_property
+ def deps_files(self):
+        """ Get all .deps files under the module build folder. """
+ deps_files = []
+ for root, _, files in os.walk(self.d_folder, topdown=False):
+ for name in files:
+ if not name.endswith(".deps"):
+ continue
+ abspath = os.path.join(root, name)
+ deps_files.append(abspath)
+ return deps_files
+
+ @cached_property
+ def DepsCollection(self):
+        """ Collect the list of all dependency files from the .deps files under a module's build folder """
+ includes = set()
+ targetname = [item[0].Name for item in self.TargetFileList.values()]
+ for abspath in self.deps_files:
+ try:
+ with open(abspath,"r") as fd:
+ lines = fd.readlines()
+
+ firstlineitems = lines[0].split(": ")
+ dependency_file = firstlineitems[1].strip(" \\\n")
+ dependency_file = dependency_file.strip('''"''')
+ if dependency_file:
+ if os.path.normpath(dependency_file +".deps") == abspath:
+ continue
+ filename = os.path.basename(dependency_file).strip()
+ if filename not in targetname:
+ includes.add(dependency_file.strip())
+
+ for item in lines[1:]:
+ if item == DEP_FILE_TAIL:
+ continue
+ dependency_file = item.strip(" \\\n")
+ dependency_file = dependency_file.strip('''"''')
+ if dependency_file == '':
+ continue
+ if os.path.normpath(dependency_file +".deps") == abspath:
+ continue
+ filename = os.path.basename(dependency_file).strip()
+ if filename in targetname:
+ continue
+ includes.add(dependency_file.strip())
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ continue
+ rt = sorted(list(set([item.strip(' " \\\n') for item in includes])))
+ return rt
+
+ @cached_property
+ def SourceFileList(self):
+        """ Get a map of the module's source file names to their source file paths """
+ source = {os.path.basename(item.File):item.Path for item in self.module_autogen.SourceFileList}
+ middle_file = {}
+ for afile in source:
+ if afile.upper().endswith(".VFR"):
+ middle_file.update({afile.split(".")[0]+".c":os.path.join(self.module_autogen.DebugDir,afile.split(".")[0]+".c")})
+ if afile.upper().endswith((".S","ASM")):
+ middle_file.update({afile.split(".")[0]+".i":os.path.join(self.module_autogen.OutputDir,afile.split(".")[0]+".i")})
+ if afile.upper().endswith(".ASL"):
+ middle_file.update({afile.split(".")[0]+".i":os.path.join(self.module_autogen.OutputDir,afile.split(".")[0]+".i")})
+ source.update({"AutoGen.c":os.path.join(self.module_autogen.OutputDir,"AutoGen.c")})
+ source.update(middle_file)
+ return source
+
+ @cached_property
+ def HasNamesakeSourceFile(self):
+ source_base_name = set([os.path.basename(item.File) for item in self.module_autogen.SourceFileList])
+ rt = len(source_base_name) != len(self.module_autogen.SourceFileList)
+ return rt
+ @cached_property
+ def CcPPCommandPathSet(self):
+ rt = set()
+ rt.add(self.module_autogen.BuildOption.get('CC',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('ASLCC',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('ASLPP',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('VFRPP',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('PP',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('APP',{}).get('PATH'))
+ rt.discard(None)
+ return rt
+ @cached_property
+ def TargetFileList(self):
+        """ Get a map of the module's target names to a tuple of the target's path and its input file's path """
+ targets = {}
+ targets["AutoGen.obj"] = (PathClass(os.path.join(self.module_autogen.OutputDir,"AutoGen.obj")),PathClass(os.path.join(self.module_autogen.DebugDir,"AutoGen.c")))
+ for item in self.module_autogen.Targets.values():
+ for block in item:
+ targets[block.Target.Path] = (block.Target,block.Inputs[0])
+ return targets
+
+ def GetRealTarget(self,source_file_abs):
+ """ Get the final target file based on source file abspath """
+ source_target_map = {item[1].Path:item[0].Path for item in self.TargetFileList.values()}
+ source_name_map = {item[1].File:item[0].Path for item in self.TargetFileList.values()}
+ target_abs = source_target_map.get(source_file_abs)
+ if target_abs is None:
+ if source_file_abs.strip().endswith(".i"):
+ sourcefilename = os.path.basename(source_file_abs.strip())
+ for sourcefile in source_name_map:
+ if sourcefilename.split(".")[0] == sourcefile.split(".")[0]:
+ target_abs = source_name_map[sourcefile]
+ break
+ else:
+ target_abs = source_file_abs
+ else:
+ target_abs = source_file_abs
+ return target_abs
+
+ def CreateDepsFileForMsvc(self, DepList):
+        """ Generate dependency (.deps) files from the /showIncludes output messages """
+ if not DepList:
+ return
+ ModuleDepDict = {}
+ current_source = ""
+ SourceFileAbsPathMap = self.SourceFileList
+ for line in DepList:
+ line = line.strip()
+ if self.HasNamesakeSourceFile:
+ for cc_cmd in self.CcPPCommandPathSet:
+ if cc_cmd in line:
+ if '''"'''+cc_cmd+'''"''' in line:
+ cc_options = line[len(cc_cmd)+2:].split()
+ else:
+ cc_options = line[len(cc_cmd):].split()
+ for item in cc_options:
+ if not item.startswith("/"):
+ if item.endswith(".txt") and item.startswith("@"):
+ with open(item[1:], "r") as file:
+ source_files = file.readlines()[0].split()
+ SourceFileAbsPathMap = {os.path.basename(file): file for file in source_files if
+ os.path.exists(file)}
+ else:
+ if os.path.exists(item):
+ SourceFileAbsPathMap.update({os.path.basename(item): item.strip()})
+ # SourceFileAbsPathMap = {os.path.basename(item):item for item in cc_options if not item.startswith("/") and os.path.exists(item)}
+ if line in SourceFileAbsPathMap:
+ current_source = line
+ if current_source not in ModuleDepDict:
+ ModuleDepDict[SourceFileAbsPathMap[current_source]] = []
+ elif "Note: including file:" == line.lstrip()[:21]:
+ if not current_source:
+ EdkLogger.error("build",BUILD_ERROR, "Parse /showIncludes output failed. line: %s. \n" % line, RaiseError=False)
+ else:
+ ModuleDepDict[SourceFileAbsPathMap[current_source]].append(line.lstrip()[22:].strip())
+
+ for source_abs in ModuleDepDict:
+ if ModuleDepDict[source_abs]:
+ target_abs = self.GetRealTarget(source_abs)
+ dep_file_name = os.path.basename(source_abs) + ".deps"
+ SaveFileOnChange(os.path.join(os.path.dirname(target_abs),dep_file_name)," \\\n".join([target_abs+":"] + ['''"''' + item +'''"''' for item in ModuleDepDict[source_abs]]),False)
+
+ def UpdateDepsFileforNonMsvc(self):
+        """ Update .deps files.
+        1. Update the target path to an absolute path.
+        2. Update intermediate targets to the final target.
+ """
+
+ for abspath in self.deps_files:
+ if abspath.endswith(".trim.deps"):
+ continue
+ try:
+ newcontent = []
+ with open(abspath,"r") as fd:
+ lines = fd.readlines()
+ if lines[-1] == DEP_FILE_TAIL:
+ continue
+ firstlineitems = lines[0].strip().split(" ")
+
+ if len(firstlineitems) > 2:
+ sourceitem = firstlineitems[1]
+ else:
+ sourceitem = lines[1].strip().split(" ")[0]
+
+ source_abs = self.SourceFileList.get(sourceitem,sourceitem)
+ firstlineitems[0] = self.GetRealTarget(source_abs)
+ p_target = firstlineitems
+ if not p_target[0].strip().endswith(":"):
+ p_target[0] += ": "
+
+ if len(p_target) == 2:
+ p_target[0] += lines[1]
+ newcontent.append(p_target[0])
+ newcontent.extend(lines[2:])
+ else:
+ line1 = " ".join(p_target).strip()
+ line1 += "\n"
+ newcontent.append(line1)
+ newcontent.extend(lines[1:])
+
+ newcontent.append("\n")
+ newcontent.append(DEP_FILE_TAIL)
+ with open(abspath,"w") as fw:
+ fw.write("".join(newcontent))
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ continue
+
+ def UpdateDepsFileforTrim(self):
+        """ Update the .deps files generated by Trim. """
+
+ for abspath in self.deps_files:
+ if not abspath.endswith(".trim.deps"):
+ continue
+ try:
+ newcontent = []
+ with open(abspath,"r") as fd:
+ lines = fd.readlines()
+ if lines[-1] == DEP_FILE_TAIL:
+ continue
+
+ source_abs = lines[0].strip().split(" ")[0]
+ targetitem = self.GetRealTarget(source_abs.strip(" :"))
+
+ targetitem += ": "
+ if len(lines)>=2:
+ targetitem += lines[1]
+ newcontent.append(targetitem)
+ newcontent.extend(lines[2:])
+ newcontent.append("\n")
+ newcontent.append(DEP_FILE_TAIL)
+ with open(abspath,"w") as fw:
+ fw.write("".join(newcontent))
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ continue
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py
new file mode 100755
index 00000000..38e3861c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py
@@ -0,0 +1,119 @@
+## @file
+# Parse an INF file and get the data of specified sections.
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.DataType import *
+
+
+class InfSectionParser():
+ def __init__(self, FilePath):
+ self._FilePath = FilePath
+ self._FileSectionDataList = []
+ self._ParserInf()
+
+ def _ParserInf(self):
+ FileLinesList = []
+ UserExtFind = False
+ FindEnd = True
+ FileLastLine = False
+ SectionLine = ''
+ SectionData = []
+
+ try:
+ with open(self._FilePath, "r") as File:
+ FileLinesList = File.readlines()
+ except BaseException:
+            EdkLogger.error("build", AUTOGEN_ERROR, 'Failed to open file %s.' % self._FilePath)
+
+ for Index in range(0, len(FileLinesList)):
+ line = str(FileLinesList[Index]).strip()
+ if Index + 1 == len(FileLinesList):
+ FileLastLine = True
+ NextLine = ''
+ else:
+ NextLine = str(FileLinesList[Index + 1]).strip()
+ if UserExtFind and FindEnd == False:
+ if line:
+ SectionData.append(line)
+ if line.startswith(TAB_SECTION_START) and line.endswith(TAB_SECTION_END):
+ SectionLine = line
+ UserExtFind = True
+ FindEnd = False
+
+ if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
+ NextLine[-1] == TAB_SECTION_END) or FileLastLine:
+ UserExtFind = False
+ FindEnd = True
+ self._FileSectionDataList.append({SectionLine: SectionData[:]})
+ del SectionData[:]
+ SectionLine = ''
+
+ # Get user extension TianoCore data
+ #
+    # @return: a list of dictionaries whose key is the section name and whose value is a list of all data in that section.
+ def GetUserExtensionTianoCore(self):
+ UserExtensionTianoCore = []
+ if not self._FileSectionDataList:
+ return UserExtensionTianoCore
+ for SectionDataDict in self._FileSectionDataList:
+ for key in SectionDataDict:
+ if key.lower().startswith("[userextensions") and key.lower().find('.tianocore.') > -1:
+ SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
+ SubSectionList = [SectionLine]
+ if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
+ SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
+ for SubSection in SubSectionList:
+ if SubSection.lower().find('.tianocore.') > -1:
+ UserExtensionTianoCore.append({SubSection: SectionDataDict[key]})
+ return UserExtensionTianoCore
+
+ # Get depex expression
+ #
+    # @return: a list of dictionaries whose key is the section name and whose value is a list of all data in that section.
+ def GetDepexExpresionList(self):
+ DepexExpressionList = []
+ if not self._FileSectionDataList:
+ return DepexExpressionList
+ for SectionDataDict in self._FileSectionDataList:
+ for key in SectionDataDict:
+ if key.lower() == "[depex]" or key.lower().startswith("[depex."):
+ SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
+ SubSectionList = [SectionLine]
+ if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
+ SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
+ for SubSection in SubSectionList:
+ SectionList = SubSection.split(TAB_SPLIT)
+ SubKey = ()
+ if len(SectionList) == 1:
+ SubKey = (TAB_ARCH_COMMON, TAB_ARCH_COMMON)
+ elif len(SectionList) == 2:
+ SubKey = (SectionList[1], TAB_ARCH_COMMON)
+ elif len(SectionList) == 3:
+ SubKey = (SectionList[1], SectionList[2])
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, 'Section %s is invalid.' % key)
+ DepexExpressionList.append({SubKey: SectionDataDict[key]})
+ return DepexExpressionList
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
new file mode 100755
index 00000000..2bf2cf1a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
@@ -0,0 +1,2456 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from AutoGen.AutoGen import AutoGen
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+from Common.StringUtils import NormPath,GetSplitList
+from collections import defaultdict
+from Workspace.WorkspaceCommon import OrderedListDict
+import os.path as path
+import copy
+import hashlib
+from . import InfSectionParser
+from . import GenC
+from . import GenMake
+from . import GenDepex
+from io import BytesIO
+from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
+from Workspace.MetaFileCommentParser import UsageList
+from .GenPcdDb import CreatePcdDatabaseCode
+from Common.caching import cached_class_function
+from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
+import json
+import tempfile
+
+## Mapping Makefile type
+gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
+#
+# Regular expressions for finding include directories. The difference between MSFT and INTEL/GCC/RVCT
+# is that the former uses /I while the latter uses -I to specify include directories.
+#
+gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
+gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
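+# For example, gBuildOptIncludePatternMsft captures "C:\inc" from "/I C:\inc",
+# while gBuildOptIncludePatternOther captures the same path from "-I C:\inc".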
+
+## default file name for AutoGen
+gAutoGenCodeFileName = "AutoGen.c"
+gAutoGenHeaderFileName = "AutoGen.h"
+gAutoGenStringFileName = "%(module_name)sStrDefs.h"
+gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
+gAutoGenDepexFileName = "%(module_name)s.depex"
+gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
+gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
+gInfSpecVersion = "0x00010017"
+
+#
+# Match name = variable
+#
+gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
+#
+# The format of guid in efivarstore statement likes following and must be correct:
+# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
+#
+gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
+
+#
+# Template string to generate an As-Built INF
+#
+gAsBuiltInfHeaderString = TemplateString("""${header_comments}
+
+# DO NOT EDIT
+# FILE auto-generated
+
+[Defines]
+ INF_VERSION = ${module_inf_version}
+ BASE_NAME = ${module_name}
+ FILE_GUID = ${module_guid}
+ MODULE_TYPE = ${module_module_type}${BEGIN}
+ VERSION_STRING = ${module_version_string}${END}${BEGIN}
+ PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
+ UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
+ PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
+ ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
+ UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
+ CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
+ DESTRUCTOR = ${module_destructor}${END}${BEGIN}
+ SHADOW = ${module_shadow}${END}${BEGIN}
+ PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
+ PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
+ PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
+ PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
+ BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
+ SPEC = ${module_spec}${END}${BEGIN}
+ UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
+ MODULE_UNI_FILE = ${module_uni_file}${END}
+
+[Packages.${module_arch}]${BEGIN}
+ ${package_item}${END}
+
+[Binaries.${module_arch}]${BEGIN}
+ ${binary_item}${END}
+
+[PatchPcd.${module_arch}]${BEGIN}
+ ${patchablepcd_item}
+${END}
+
+[Protocols.${module_arch}]${BEGIN}
+ ${protocol_item}
+${END}
+
+[Ppis.${module_arch}]${BEGIN}
+ ${ppi_item}
+${END}
+
+[Guids.${module_arch}]${BEGIN}
+ ${guid_item}
+${END}
+
+[PcdEx.${module_arch}]${BEGIN}
+ ${pcd_item}
+${END}
+
+[LibraryClasses.${module_arch}]
+## @LIB_INSTANCES${BEGIN}
+# ${libraryclasses_item}${END}
+
+${depexsection_item}
+
+${userextension_tianocore_item}
+
+${tail_comments}
+
+[BuildOptions.${module_arch}]
+## @AsBuilt${BEGIN}
+## ${flags_item}${END}
+""")
+#
+# Extend the lists contained in a dictionary with the lists stored in another dictionary.
+# If CopyToDict is not derived from defaultdict(list), this may raise an exception.
+#
+def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
+ for Key in CopyFromDict:
+ CopyToDict[Key].extend(CopyFromDict[Key])
+
+# Create a directory specified by a set of path elements and return the full path
+def _MakeDir(PathList):
+ RetVal = path.join(*PathList)
+ CreateDirectory(RetVal)
+ return RetVal
+
+#
+# Convert string to C format array
+#
+def _ConvertStringToByteArray(Value):
+ Value = Value.strip()
+ if not Value:
+ return None
+ if Value[0] == '{':
+ if not Value.endswith('}'):
+ return None
+ Value = Value.replace(' ', '').replace('{', '').replace('}', '')
+ ValFields = Value.split(',')
+ try:
+ for Index in range(len(ValFields)):
+ ValFields[Index] = str(int(ValFields[Index], 0))
+ except ValueError:
+ return None
+ Value = '{' + ','.join(ValFields) + '}'
+ return Value
+
+ Unicode = False
+ if Value.startswith('L"'):
+ if not Value.endswith('"'):
+ return None
+ Value = Value[1:]
+ Unicode = True
+ elif not Value.startswith('"') or not Value.endswith('"'):
+ return None
+
+ Value = eval(Value) # translate escape character
+ NewValue = '{'
+ for Index in range(0, len(Value)):
+ if Unicode:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
+ else:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
+ Value = NewValue + '0}'
+ return Value
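+# For example, _ConvertStringToByteArray('"AB"') returns '{65,66,0}';
+# _ConvertStringToByteArray('L"AB"') returns the same digits taken as 16-bit
+# characters; _ConvertStringToByteArray('{0x1, 2}') returns '{1,2}'.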
+
+## ModuleAutoGen class
+#
+# This class encapsulates the AutoGen behaviors for the build tools. In addition to
+# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
+# to the [depex] section in module's inf file.
+#
+class ModuleAutoGen(AutoGen):
+ # call super().__init__ then call the worker function with different parameter count
+ def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ if not hasattr(self, "_Init"):
+ self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
+ self._Init = True
+
+ ## Cache the timestamps of metafiles of every module in a class attribute
+ #
+ TimeDict = {}
+
+ def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+        # check if this module is employed by the active platform
+ if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
+ EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
+ % (MetaFile, Arch))
+ return None
+ return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+
+ ## Initialize ModuleAutoGen
+ #
+ # @param Workspace EdkIIWorkspaceBuild object
+ # @param ModuleFile The path of module file
+ # @param Target Build target (DEBUG, RELEASE)
+ # @param Toolchain Name of tool chain
+ # @param Arch The arch the module supports
+ # @param PlatformFile Platform meta-file
+ #
+ def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
+
+ self.Workspace = Workspace
+ self.WorkspaceDir = ""
+ self.PlatformInfo = None
+ self.DataPipe = DataPipe
+ self.__init_platform_info__()
+ self.MetaFile = ModuleFile
+ self.SourceDir = self.MetaFile.SubDir
+ self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
+
+ self.ToolChain = Toolchain
+ self.BuildTarget = Target
+ self.Arch = Arch
+ self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
+ self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
+
+ self.IsCodeFileCreated = False
+ self.IsAsBuiltInfCreated = False
+ self.DepexGenerated = False
+
+ self.BuildDatabase = self.Workspace.BuildDatabase
+ self.BuildRuleOrder = None
+ self.BuildTime = 0
+
+ self._GuidComments = OrderedListDict()
+ self._ProtocolComments = OrderedListDict()
+ self._PpiComments = OrderedListDict()
+ self._BuildTargets = None
+ self._IntroBuildTargetList = None
+ self._FinalBuildTargetList = None
+ self._FileTypes = None
+
+ self.AutoGenDepSet = set()
+ self.ReferenceModules = []
+ self.ConstPcd = {}
+ self.Makefile = None
+ self.FileDependCache = {}
+
+ def __init_platform_info__(self):
+ pinfo = self.DataPipe.Get("P_Info")
+ self.WorkspaceDir = pinfo.get("WorkspaceDir")
+ self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
+ ## hash() operator of ModuleAutoGen
+ #
+ # The module file path and arch string will be used to represent
+ # hash value of this object
+ #
+ # @retval int Hash value of the module file path and arch
+ #
+ @cached_class_function
+ def __hash__(self):
+ return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, self.Arch)
+
+ # Get FixedAtBuild Pcds of this Module
+ @cached_property
+ def FixedAtBuildPcds(self):
+ RetVal = []
+ for Pcd in self.ModulePcdList:
+ if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
+ continue
+ if Pcd not in RetVal:
+ RetVal.append(Pcd)
+ return RetVal
+
+ @cached_property
+ def FixedVoidTypePcds(self):
+ RetVal = {}
+ for Pcd in self.FixedAtBuildPcds:
+ if Pcd.DatumType == TAB_VOID:
+ if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
+ RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
+ return RetVal
+
+ @property
+ def UniqueBaseName(self):
+ ModuleNames = self.DataPipe.Get("M_Name")
+ if not ModuleNames:
+ return self.Name
+ return ModuleNames.get((self.Name,self.MetaFile),self.Name)
+
+ # Macros that can be used in build_rule.txt (and in the Makefile)
+ @cached_property
+ def Macros(self):
+ return OrderedDict((
+ ("WORKSPACE" ,self.WorkspaceDir),
+ ("MODULE_NAME" ,self.Name),
+ ("MODULE_NAME_GUID" ,self.UniqueBaseName),
+ ("MODULE_GUID" ,self.Guid),
+ ("MODULE_VERSION" ,self.Version),
+ ("MODULE_TYPE" ,self.ModuleType),
+ ("MODULE_FILE" ,str(self.MetaFile)),
+ ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
+ ("MODULE_RELATIVE_DIR" ,self.SourceDir),
+ ("MODULE_DIR" ,self.SourceDir),
+ ("BASE_NAME" ,self.Name),
+ ("ARCH" ,self.Arch),
+ ("TOOLCHAIN" ,self.ToolChain),
+ ("TOOLCHAIN_TAG" ,self.ToolChain),
+ ("TOOL_CHAIN_TAG" ,self.ToolChain),
+ ("TARGET" ,self.BuildTarget),
+ ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
+ ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
+ ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
+ ("MODULE_BUILD_DIR" ,self.BuildDir),
+ ("OUTPUT_DIR" ,self.OutputDir),
+ ("DEBUG_DIR" ,self.DebugDir),
+ ("DEST_DIR_OUTPUT" ,self.OutputDir),
+ ("DEST_DIR_DEBUG" ,self.DebugDir),
+ ("PLATFORM_NAME" ,self.PlatformInfo.Name),
+ ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
+ ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
+ ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
+ ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
+ ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
+ ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
+ ))
+
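+ # Illustrative sketch (not part of the build flow): in build_rule.txt or a
+ # generated Makefile these macros are referenced with $(NAME) syntax and
+ # expand to the values above, e.g. with hypothetical values:
+ # $(MODULE_NAME) -> "HelloWorld"
+ # $(ARCH) -> "X64"
+ # $(MODULE_BUILD_DIR) -> "<BUILD_DIR>/X64/<MODULE_RELATIVE_DIR>/HelloWorld"
+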
+ ## Return the module build data object
+ @cached_property
+ def Module(self):
+ return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+
+ ## Return the module name
+ @cached_property
+ def Name(self):
+ return self.Module.BaseName
+
+ ## Return the module DxsFile if exist
+ @cached_property
+ def DxsFile(self):
+ return self.Module.DxsFile
+
+ ## Return the module meta-file GUID
+ @cached_property
+ def Guid(self):
+ #
+ # To build the same module more than once, the module path with FILE_GUID overridden has
+ # the file name FILE_GUIDmodule.inf, while the relative path (self.MetaFile.File) is the real path
+ # in the DSC. The overridden GUID can be retrieved from the file name.
+ #
+ if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
+ #
+ # Length of GUID is 36
+ #
+ return os.path.basename(self.MetaFile.Path)[:36]
+ return self.Module.Guid
+
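+ # Hypothetical example of the override convention above: a module built twice
+ # with FILE_GUID overridden in the DSC gets a meta-file basename such as
+ # "8913c5e0-33f6-4d86-9bf1-43ef89fc0666module.inf", so slicing the first
+ # 36 characters recovers the overriding GUID string.
+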
+ ## Return the module version
+ @cached_property
+ def Version(self):
+ return self.Module.Version
+
+ ## Return the module type
+ @cached_property
+ def ModuleType(self):
+ return self.Module.ModuleType
+
+ ## Return the component type (for Edk.x style of module)
+ @cached_property
+ def ComponentType(self):
+ return self.Module.ComponentType
+
+ ## Return the build type
+ @cached_property
+ def BuildType(self):
+ return self.Module.BuildType
+
+ ## Return the PCD_IS_DRIVER setting
+ @cached_property
+ def PcdIsDriver(self):
+ return self.Module.PcdIsDriver
+
+ ## Return the autogen version, i.e. module meta-file version
+ @cached_property
+ def AutoGenVersion(self):
+ return self.Module.AutoGenVersion
+
+ ## Check if the module is library or not
+ @cached_property
+ def IsLibrary(self):
+ return bool(self.Module.LibraryClass)
+
+ ## Check if the module is binary module or not
+ @cached_property
+ def IsBinaryModule(self):
+ return self.Module.IsBinaryModule
+
+ ## Return the directory to store intermediate files of the module
+ @cached_property
+ def BuildDir(self):
+ return _MakeDir((
+ self.PlatformInfo.BuildDir,
+ self.Arch,
+ self.SourceDir,
+ self.MetaFile.BaseName
+ ))
+
+ ## Return the directory to store the intermediate object files of the module
+ @cached_property
+ def OutputDir(self):
+ return _MakeDir((self.BuildDir, "OUTPUT"))
+
+ ## Return the directory path to store ffs file
+ @cached_property
+ def FfsOutputDir(self):
+ if GlobalData.gFdfParser:
+ return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+ return ''
+
+ ## Return the directory to store auto-gened source files of the module
+ @cached_property
+ def DebugDir(self):
+ return _MakeDir((self.BuildDir, "DEBUG"))
+
+ ## Return the path of custom file
+ @cached_property
+ def CustomMakefile(self):
+ RetVal = {}
+ for Type in self.Module.CustomMakefile:
+ MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
+ File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
+ RetVal[MakeType] = File
+ return RetVal
+
+ ## Return the directory of the makefile
+ #
+ # @retval string The directory string of module's makefile
+ #
+ @cached_property
+ def MakeFileDir(self):
+ return self.BuildDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ return self.PlatformInfo.BuildCommand
+
+ ## Get Module package and Platform package
+ #
+ # @retval list The list of package object
+ #
+ @cached_property
+ def PackageList(self):
+ PkgList = []
+ if self.Module.Packages:
+ PkgList.extend(self.Module.Packages)
+ Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+ for Package in Platform.Packages:
+ if Package in PkgList:
+ continue
+ PkgList.append(Package)
+ return PkgList
+
+ ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
+ #
+ # @retval list The list of package object
+ #
+ @cached_property
+ def DerivedPackageList(self):
+ PackageList = []
+ PackageList.extend(self.PackageList)
+ for M in self.DependentLibraryList:
+ for Package in M.Packages:
+ if Package in PackageList:
+ continue
+ PackageList.append(Package)
+ return PackageList
+
+ ## Get the depex string
+ #
+ # @return A string containing all depex expressions.
+ def _GetDepexExpressionString(self):
+ DepexStr = ''
+ DepexList = []
+ ## DPX_SOURCE in the [Defines] section.
+ if self.Module.DxsFile:
+ return DepexStr
+ for M in [self.Module] + self.DependentLibraryList:
+ Filename = M.MetaFile.Path
+ InfObj = InfSectionParser.InfSectionParser(Filename)
+ DepexExpressionList = InfObj.GetDepexExpresionList()
+ for DepexExpression in DepexExpressionList:
+ for key in DepexExpression:
+ Arch, ModuleType = key
+ DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
+ # If the module type is USER_DEFINED, all DEPEX section tags
+ # are copied into the As-Built INF file as separate
+ # DEPEX section tags.
+ if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
+ if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
+ DepexList.append({(Arch, ModuleType): DepexExpr})
+ else:
+ if Arch.upper() == TAB_ARCH_COMMON or \
+ (Arch.upper() == self.Arch.upper() and \
+ ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
+ DepexList.append({(Arch, ModuleType): DepexExpr})
+
+ # The module type is USER_DEFINED: emit each DEPEX section verbatim as comments.
+ if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
+ for Depex in DepexList:
+ for key in Depex:
+ DepexStr += '[Depex.%s.%s]\n' % key
+ DepexStr += '\n'.join('# '+ val for val in Depex[key])
+ DepexStr += '\n\n'
+ if not DepexStr:
+ return '[Depex.%s]\n' % self.Arch
+ return DepexStr
+
+ # The module type is not USER_DEFINED: merge all expressions with AND.
+ Count = 0
+ for Depex in DepexList:
+ Count += 1
+ if DepexStr != '':
+ DepexStr += ' AND '
+ DepexStr += '('
+ for D in Depex.values():
+ DepexStr += ' '.join(val for val in D)
+ Index = DepexStr.find('END')
+ if Index > -1 and Index == len(DepexStr) - 3:
+ DepexStr = DepexStr[:-3]
+ DepexStr = DepexStr.strip()
+ DepexStr += ')'
+ if Count == 1:
+ DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
+ if not DepexStr:
+ return '[Depex.%s]\n' % self.Arch
+ return '[Depex.%s]\n# ' % self.Arch + DepexStr
+
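+ # For a non-USER_DEFINED module the returned string has this shape
+ # (illustrative only; the GUID names are made up):
+ # [Depex.X64]
+ # # (gSampleProtocolGuid) AND (gAnotherProtocolGuid)
+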
+ ## Merge dependency expression
+ #
+ # @retval list The token list of the dependency expression after parsing
+ #
+ @cached_property
+ def DepexList(self):
+ if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
+ return {}
+
+ DepexList = []
+ #
+ # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
+ #
+ FixedVoidTypePcds = {}
+ for M in [self] + self.LibraryAutoGenList:
+ FixedVoidTypePcds.update(M.FixedVoidTypePcds)
+ for M in [self] + self.LibraryAutoGenList:
+ Inherited = False
+ for D in M.Module.Depex[self.Arch, self.ModuleType]:
+ if DepexList != []:
+ DepexList.append('AND')
+ DepexList.append('(')
+ # Replace the item with its value if it is a FixedAtBuild PCD
+ NewList = []
+ for item in D:
+ if '.' not in item:
+ NewList.append(item)
+ else:
+ try:
+ Value = FixedVoidTypePcds[item]
+ if len(Value.split(',')) != 16:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
+ NewList.append(Value)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
+
+ DepexList.extend(NewList)
+ if DepexList[-1] == 'END': # no need for an END at this point
+ DepexList.pop()
+ DepexList.append(')')
+ Inherited = True
+ if Inherited:
+ EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
+ if 'BEFORE' in DepexList or 'AFTER' in DepexList:
+ break
+ if len(DepexList) > 0:
+ EdkLogger.verbose('')
+ return {self.ModuleType:DepexList}
+
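+ # Illustrative shape of the returned token dict (hypothetical GUID names):
+ # {'DXE_DRIVER': ['(', 'gSampleProtocolGuid', ')', 'AND', '(', 'gAnotherProtocolGuid', ')']}
+ # FixedAtBuild VOID* PCDs referenced in [Depex] have been replaced by their
+ # 16-byte GUID values before the tokens are emitted.
+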
+ ## Merge dependency expression
+ #
+ # @retval dict Module type mapped to the merged dependency expression string
+ #
+ @cached_property
+ def DepexExpressionDict(self):
+ if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
+ return {}
+
+ DepexExpressionString = ''
+ #
+ # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
+ #
+ for M in [self.Module] + self.DependentLibraryList:
+ Inherited = False
+ for D in M.DepexExpression[self.Arch, self.ModuleType]:
+ if DepexExpressionString != '':
+ DepexExpressionString += ' AND '
+ DepexExpressionString += '('
+ DepexExpressionString += D
+ DepexExpressionString = DepexExpressionString.rstrip('END').strip()
+ DepexExpressionString += ')'
+ Inherited = True
+ if Inherited:
+ EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
+ if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
+ break
+ if len(DepexExpressionString) > 0:
+ EdkLogger.verbose('')
+
+ return {self.ModuleType:DepexExpressionString}
+
+ # Get the TianoCore user extensions, including those of dependent libraries.
+ # @retval list A list of TianoCore user extensions.
+ #
+ def _GetTianoCoreUserExtensionList(self):
+ TianoCoreUserExtensionList = []
+ for M in [self.Module] + self.DependentLibraryList:
+ Filename = M.MetaFile.Path
+ InfObj = InfSectionParser.InfSectionParser(Filename)
+ TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
+ for TianoCoreUserExtent in TianoCoreUserExtenList:
+ for Section in TianoCoreUserExtent:
+ ItemList = Section.split(TAB_SPLIT)
+ Arch = self.Arch
+ if len(ItemList) == 4:
+ Arch = ItemList[3]
+ if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
+ TianoCoreList = []
+ TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
+ TianoCoreList.extend(TianoCoreUserExtent[Section][:])
+ TianoCoreList.append('\n')
+ TianoCoreUserExtensionList.append(TianoCoreList)
+
+ return TianoCoreUserExtensionList
+
+ ## Return the list of specification version required for the module
+ #
+ # @retval list The list of specification defined in module file
+ #
+ @cached_property
+ def Specification(self):
+ return self.Module.Specification
+
+ ## Tool option for the module build
+ #
+ # @retval dict The dict containing valid options
+ #
+ @cached_property
+ def BuildOption(self):
+ RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
+ if self.BuildRuleOrder:
+ self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
+ return RetVal
+
+ ## Get include path list from tool option for the module build
+ #
+ # @retval list The include path list
+ #
+ @cached_property
+ def BuildOptionIncPathList(self):
+ #
+ # Regular expression for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
+ # is that the former uses /I while the latter use -I to specify include directories
+ #
+ if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
+ BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
+ elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
+ BuildOptIncludeRegEx = gBuildOptIncludePatternOther
+ else:
+ #
+ # New ToolChainFamily; we don't know whether there is an option to specify include directories
+ #
+ return []
+
+ RetVal = []
+ for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
+ try:
+ FlagOption = self.BuildOption[Tool]['FLAGS']
+ except KeyError:
+ FlagOption = ''
+
+ if self.ToolChainFamily != 'RVCT':
+ IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
+ else:
+ #
+ # RVCT may specify a list of directories separated by commas
+ #
+ IncPathList = []
+ for Path in BuildOptIncludeRegEx.findall(FlagOption):
+ PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
+ IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
+
+ #
+ # EDK II modules must not reference header files outside of the packages they depend on or
+ # within the module's directory tree. Report an error on violation.
+ #
+ if not GlobalData.gDisableIncludePathCheck:
+ for Path in IncPathList:
+ if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
+ ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
+ EdkLogger.error("build",
+ PARAMETER_INVALID,
+ ExtraData=ErrMsg,
+ File=str(self.MetaFile))
+ RetVal += IncPathList
+ return RetVal
+
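+ # Sketch of the extraction above with hypothetical FLAGS values:
+ # MSFT: FLAGS = "/nologo /IC:\Pkg\Include" -> ["C:\Pkg\Include"]
+ # GCC: FLAGS = "-Wall -I/opt/pkg/include" -> ["/opt/pkg/include"]
+ # gBuildOptIncludePatternMsft/gBuildOptIncludePatternOther are the regular
+ # expressions that pick the path out of each /I or -I occurrence.
+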
+ ## Return a list of files which can be built from source
+ #
+ # What kind of files can be built is determined by build rules in
+ # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
+ #
+ @cached_property
+ def SourceFileList(self):
+ RetVal = []
+ ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
+ ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
+ for F in self.Module.Sources:
+ # match tool chain
+ if F.TagName not in ToolChainTagSet:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
+ "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
+ continue
+ # match tool chain family or build rule family
+ if F.ToolChainFamily not in ToolChainFamilySet:
+ EdkLogger.debug(
+ EdkLogger.DEBUG_0,
+ "The file [%s] must be built by tools of [%s], " \
+ "but current toolchain family is [%s], buildrule family is [%s]" \
+ % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
+ continue
+
+ # add the file path into search path list for file including
+ if F.Dir not in self.IncludePathList:
+ self.IncludePathList.insert(0, F.Dir)
+ RetVal.append(F)
+
+ self._MatchBuildRuleOrder(RetVal)
+
+ for F in RetVal:
+ self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
+ return RetVal
+
+ def _MatchBuildRuleOrder(self, FileList):
+ Order_Dict = {}
+ self.BuildOption
+ for SingleFile in FileList:
+ if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
+ key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
+ if key in Order_Dict:
+ Order_Dict[key].append(SingleFile.Ext)
+ else:
+ Order_Dict[key] = [SingleFile.Ext]
+
+ RemoveList = []
+ for F in Order_Dict:
+ if len(Order_Dict[F]) > 1:
+ Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
+ for Ext in Order_Dict[F][1:]:
+ RemoveList.append(F + Ext)
+
+ for item in RemoveList:
+ FileList.remove(item)
+
+ return FileList
+
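+ # Example of the filtering above, assuming BuildRuleOrder is ['.nasm', '.asm']:
+ # if both CpuAsm.nasm and CpuAsm.asm appear in FileList, '.nasm' sorts first,
+ # so CpuAsm.asm is removed and only the preferred source is built.
+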
+ ## Return the list of unicode files
+ @cached_property
+ def UnicodeFileList(self):
+ return self.FileTypes.get(TAB_UNICODE_FILE,[])
+
+ ## Return the list of vfr files
+ @cached_property
+ def VfrFileList(self):
+ return self.FileTypes.get(TAB_VFR_FILE, [])
+
+ ## Return the list of Image Definition files
+ @cached_property
+ def IdfFileList(self):
+ return self.FileTypes.get(TAB_IMAGE_FILE,[])
+
+ ## Return a list of files which can be built from binary
+ #
+ # "Build" binary files are just to copy them to build directory.
+ #
+ # @retval list The list of files which can be built later
+ #
+ @cached_property
+ def BinaryFileList(self):
+ RetVal = []
+ for F in self.Module.Binaries:
+ if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
+ continue
+ RetVal.append(F)
+ self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
+ return RetVal
+
+ @cached_property
+ def BuildRules(self):
+ RetVal = {}
+ BuildRuleDatabase = self.PlatformInfo.BuildRule
+ for Type in BuildRuleDatabase.FileTypeList:
+ #first try getting build rule by BuildRuleFamily
+ RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
+ if not RuleObject:
+ # BuildType is normally the module type; fall back to ModuleType if they differ
+ if self.ModuleType != self.BuildType:
+ RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
+ #second try getting build rule by ToolChainFamily
+ if not RuleObject:
+ RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
+ if not RuleObject:
+ # BuildType is normally the module type; fall back to ModuleType if they differ
+ if self.ModuleType != self.BuildType:
+ RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
+ if not RuleObject:
+ continue
+ RuleObject = RuleObject.Instantiate(self.Macros)
+ RetVal[Type] = RuleObject
+ for Ext in RuleObject.SourceFileExtList:
+ RetVal[Ext] = RuleObject
+ return RetVal
+
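+ # Lookup order sketch for the property above: for each file type, a rule is
+ # first looked up by (BuildType, Arch, BuildRuleFamily), then by ModuleType,
+ # then again with ToolChainFamily, and the winning rule is instantiated with
+ # self.Macros so its commands see the module-specific paths and names.
+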
+ def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
+ if self._BuildTargets is None:
+ self._IntroBuildTargetList = set()
+ self._FinalBuildTargetList = set()
+ self._BuildTargets = defaultdict(set)
+ self._FileTypes = defaultdict(set)
+
+ if not BinaryFileList:
+ BinaryFileList = self.BinaryFileList
+
+ SubDirectory = os.path.join(self.OutputDir, File.SubDir)
+ if not os.path.exists(SubDirectory):
+ CreateDirectory(SubDirectory)
+ TargetList = set()
+ FinalTargetName = set()
+ RuleChain = set()
+ SourceList = [File]
+ Index = 0
+ #
+ # Make sure to get build rule order value
+ #
+ self.BuildOption
+
+ while Index < len(SourceList):
+ # Reset the FileType if not the first iteration.
+ if Index > 0:
+ FileType = TAB_UNKNOWN_FILE
+ Source = SourceList[Index]
+ Index = Index + 1
+
+ if Source != File:
+ CreateDirectory(Source.Dir)
+
+ if File.IsBinary and File == Source and File in BinaryFileList:
+ # Skip all files that are not binary libraries
+ if not self.IsLibrary:
+ continue
+ RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
+ elif FileType in self.BuildRules:
+ RuleObject = self.BuildRules[FileType]
+ elif Source.Ext in self.BuildRules:
+ RuleObject = self.BuildRules[Source.Ext]
+ else:
+ # No more rule to apply: Source is a final target.
+ FinalTargetName.add(Source)
+ continue
+
+ FileType = RuleObject.SourceFileType
+ self._FileTypes[FileType].add(Source)
+
+ # stop at STATIC_LIBRARY for library
+ if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
+ FinalTargetName.add(Source)
+ continue
+
+ Target = RuleObject.Apply(Source, self.BuildRuleOrder)
+ if not Target:
+ # No Target: Source is a final target.
+ FinalTargetName.add(Source)
+ continue
+
+ TargetList.add(Target)
+ self._BuildTargets[FileType].add(Target)
+
+ if not Source.IsBinary and Source == File:
+ self._IntroBuildTargetList.add(Target)
+
+ # to avoid cyclic rule
+ if FileType in RuleChain:
+ EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))
+
+ RuleChain.add(FileType)
+ SourceList.extend(Target.Outputs)
+
+ # For each final target name, retrieve the corresponding TargetDescBlock instance.
+ for FTargetName in FinalTargetName:
+ for Target in TargetList:
+ if FTargetName == Target.Target:
+ self._FinalBuildTargetList.add(Target)
+
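+ # Illustrative rule chain for a single C source (rule names are schematic):
+ # Foo.c --[C-Code-File rule]--> Foo.obj --[Object-File rule]--> module library
+ # Each Target's Outputs are appended to SourceList, so the loop keeps applying
+ # rules until a file with no matching rule remains: that is a final target.
+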
+ @cached_property
+ def Targets(self):
+ if self._BuildTargets is None:
+ self._IntroBuildTargetList = set()
+ self._FinalBuildTargetList = set()
+ self._BuildTargets = defaultdict(set)
+ self._FileTypes = defaultdict(set)
+
+ #TRICK: call SourceFileList property to apply build rule for source files
+ self.SourceFileList
+
+ #TRICK: call _GetBinaryFileList to apply build rule for binary files
+ self.BinaryFileList
+
+ return self._BuildTargets
+
+ @cached_property
+ def IntroTargetList(self):
+ self.Targets
+ return self._IntroBuildTargetList
+
+ @cached_property
+ def CodaTargetList(self):
+ self.Targets
+ return self._FinalBuildTargetList
+
+ @cached_property
+ def FileTypes(self):
+ self.Targets
+ return self._FileTypes
+
+ ## Get the list of package object the module depends on and the Platform depends on
+ #
+ # @retval list The package object list
+ #
+ @cached_property
+ def DependentPackageList(self):
+ return self.PackageList
+
+ ## Return the list of auto-generated code file
+ #
+ # @retval list The list of auto-generated file
+ #
+ @cached_property
+ def AutoGenFileList(self):
+ AutoGenUniIdf = self.BuildType != 'UEFI_HII'
+ UniStringBinBuffer = BytesIO()
+ IdfGenBinBuffer = BytesIO()
+ RetVal = {}
+ AutoGenC = TemplateString()
+ AutoGenH = TemplateString()
+ StringH = TemplateString()
+ StringIdf = TemplateString()
+ GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
+ #
+ # AutoGen.c is generated if there are library classes in inf, or there are object files
+ #
+ if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
+ or TAB_OBJECT_FILE in self.FileTypes):
+ AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
+ RetVal[AutoFile] = str(AutoGenC)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if str(AutoGenH) != "":
+ AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
+ RetVal[AutoFile] = str(AutoGenH)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if str(StringH) != "":
+ AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
+ RetVal[AutoFile] = str(StringH)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
+ AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
+ RetVal[AutoFile] = UniStringBinBuffer.getvalue()
+ AutoFile.IsBinary = True
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if UniStringBinBuffer is not None:
+ UniStringBinBuffer.close()
+ if str(StringIdf) != "":
+ AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
+ RetVal[AutoFile] = str(StringIdf)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
+ AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
+ RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
+ AutoFile.IsBinary = True
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if IdfGenBinBuffer is not None:
+ IdfGenBinBuffer.close()
+ return RetVal
+
+ ## Return the list of library modules explicitly or implicitly used by this module
+ @cached_property
+ def DependentLibraryList(self):
+ # only merge library classes and PCD for non-library module
+ if self.IsLibrary:
+ return []
+ return self.PlatformInfo.ApplyLibraryInstance(self.Module)
+
+ ## Get the list of PCDs from current module
+ #
+ # @retval list The list of PCD
+ #
+ @cached_property
+ def ModulePcdList(self):
+ # apply PCD settings from platform
+ RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)
+
+ return RetVal
+ @cached_property
+ def _PcdComments(self):
+ ReVal = OrderedListDict()
+ ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
+ if not self.IsLibrary:
+ for Library in self.DependentLibraryList:
+ ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
+ return ReVal
+
+ ## Get the list of PCDs from dependent libraries
+ #
+ # @retval list The list of PCD
+ #
+ @cached_property
+ def LibraryPcdList(self):
+ if self.IsLibrary:
+ return []
+ RetVal = []
+ Pcds = set()
+ # get PCDs from dependent libraries
+ for Library in self.DependentLibraryList:
+ PcdsInLibrary = OrderedDict()
+ for Key in Library.Pcds:
+ # skip duplicated PCDs
+ if Key in self.Module.Pcds or Key in Pcds:
+ continue
+ Pcds.add(Key)
+ PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
+ RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))
+ return RetVal
+
+ ## Get the GUID value mapping
+ #
+ # @retval dict The mapping between GUID cname and its value
+ #
+ @cached_property
+ def GuidList(self):
+ RetVal = self.Module.Guids
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.Guids)
+ ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
+ ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
+ return RetVal
+
+ @cached_property
+ def GetGuidsUsedByPcd(self):
+ RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.GetGuidsUsedByPcd())
+ return RetVal
+ ## Get the protocol value mapping
+ #
+ # @retval dict The mapping between protocol cname and its value
+ #
+ @cached_property
+ def ProtocolList(self):
+ RetVal = OrderedDict(self.Module.Protocols)
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.Protocols)
+ ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
+ ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
+ return RetVal
+
+ ## Get the PPI value mapping
+ #
+ # @retval dict The mapping between PPI cname and its value
+ #
+ @cached_property
+ def PpiList(self):
+ RetVal = OrderedDict(self.Module.Ppis)
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.Ppis)
+ ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
+ ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
+ return RetVal
+
+ ## Get the list of include search paths
+ #
+ # @retval list The list of include search paths
+ #
+ @cached_property
+ def IncludePathList(self):
+ RetVal = []
+ RetVal.append(self.MetaFile.Dir)
+ RetVal.append(self.DebugDir)
+
+ for Package in self.PackageList:
+ PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
+ if PackageDir not in RetVal:
+ RetVal.append(PackageDir)
+ IncludesList = Package.Includes
+ if Package._PrivateIncludes:
+ if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
+ IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
+ for Inc in IncludesList:
+ if Inc not in RetVal:
+ RetVal.append(str(Inc))
+ RetVal.extend(self.IncPathFromBuildOptions)
+ return RetVal
+
+ @cached_property
+ def IncPathFromBuildOptions(self):
+ IncPathList = []
+ for tool in self.BuildOption:
+ if 'FLAGS' in self.BuildOption[tool]:
+ flags = self.BuildOption[tool]['FLAGS']
+ whitespace = False
+ for flag in flags.split(" "):
+ flag = flag.strip()
+ if flag.startswith(("/I","-I")):
+ if len(flag)>2:
+ if os.path.exists(flag[2:]):
+ IncPathList.append(flag[2:])
+ else:
+ whitespace = True
+ continue
+ if whitespace and flag:
+ if os.path.exists(flag):
+ IncPathList.append(flag)
+ whitespace = False
+ return IncPathList
+
+ @cached_property
+ def IncludePathLength(self):
+ return sum(len(inc)+1 for inc in self.IncludePathList)
+
+ ## Get the list of include paths from the packages
+ #
+ # @retval list The include path list from the packages
+ #
+ @cached_property
+ def PackageIncludePathList(self):
+ IncludesList = []
+ for Package in self.PackageList:
+ PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
+ IncludesList = Package.Includes
+ if Package._PrivateIncludes:
+ if not self.MetaFile.Path.startswith(PackageDir):
+ IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
+ return IncludesList
+
+ ## Get HII EX PCDs which may be used by VFR
+ #
+ # efivarstore used by VFR may relate to HII EX PCDs
+ # Get the variable name and GUID from efivarstore and HII EX PCD
+ # List the HII EX PCDs in As Built INF if both name and GUID match.
+ #
+ # @retval list HII EX PCDs
+ #
+ def _GetPcdsMaybeUsedByVfr(self):
+ if not self.SourceFileList:
+ return []
+
+ NameGuids = set()
+ for SrcFile in self.SourceFileList:
+ if SrcFile.Ext.lower() != '.vfr':
+ continue
+ Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
+ if not os.path.exists(Vfri):
+ continue
+ VfriFile = open(Vfri, 'r')
+ Content = VfriFile.read()
+ VfriFile.close()
+ Pos = Content.find('efivarstore')
+ while Pos != -1:
+ #
+ # Make sure 'efivarstore' is the start of efivarstore statement
+ # in case the value of 'name' (name = efivarstore) is itself 'efivarstore'
+ #
+ Index = Pos - 1
+ while Index >= 0 and Content[Index] in ' \t\r\n':
+ Index -= 1
+ if Index >= 0 and Content[Index] != ';':
+ Pos = Content.find('efivarstore', Pos + len('efivarstore'))
+ continue
+ #
+ # 'efivarstore' must be followed by name and guid
+ #
+ Name = gEfiVarStoreNamePattern.search(Content, Pos)
+ if not Name:
+ break
+ Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
+ if not Guid:
+ break
+ NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
+ NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
+ Pos = Content.find('efivarstore', Name.end())
+ if not NameGuids:
+ return []
+ HiiExPcds = []
+ for Pcd in self.PlatformInfo.Pcds.values():
+ if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
+ continue
+ for SkuInfo in Pcd.SkuInfoList.values():
+ Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
+ if not Value:
+ continue
+ Name = _ConvertStringToByteArray(SkuInfo.VariableName)
+ Guid = GuidStructureStringToGuidString(Value)
+ if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
+ HiiExPcds.append(Pcd)
+ break
+
+ return HiiExPcds
+
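+ # The preprocessed .i file is scanned for statements of this shape
+ # (hypothetical VFR fragment):
+ # efivarstore MY_VAR_STORE,
+ # attribute = EFI_VARIABLE_BOOTSERVICE_ACCESS,
+ # name = MySetupData,
+ # guid = { 0x12345678, 0x1234, 0x1234, { 0, 0, 0, 0, 0, 0, 0, 0 } };
+ # The (name, guid) pair is then matched against each HII EX PCD's
+ # VariableName/VariableGuid taken from the platform SKU info.
+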
+ def _GenOffsetBin(self):
+ VfrUniBaseName = {}
+ for SourceFile in self.Module.Sources:
+ if SourceFile.Type.upper() == ".VFR" :
+ #
+ # search the .map file to find the offset of vfr binary in the PE32+/TE file.
+ #
+ VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
+ elif SourceFile.Type.upper() == ".UNI" :
+ #
+ # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
+ #
+ VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
+
+ if not VfrUniBaseName:
+ return None
+ MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
+ EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
+ VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
+ if not VfrUniOffsetList:
+ return None
+
+ OutputName = '%sOffset.bin' % self.Name
+ UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)
+
+ try:
+ fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
+
+ # Use an instance of BytesIO to cache data
+ fStringIO = BytesIO()
+
+ for Item in VfrUniOffsetList:
+ if (Item[0].find("Strings") != -1):
+ #
+ # UNI offset in image.
+ # GUID + Offset
+ # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
+ #
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
+ fStringIO.write(UniGuid)
+ UniValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (UniValue)
+ else:
+ #
+ # VFR binary offset in image.
+ # GUID + Offset
+ # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
+ #
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
+ fStringIO.write(VfrGuid)
+ VfrValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (VfrValue)
+ #
+ # write data into file.
+ #
+ try :
+ fInputfile.write (fStringIO.getvalue())
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
+ "file been locked or using by other applications." %UniVfrOffsetFileName, None)
+
+ fStringIO.close ()
+ fInputfile.close ()
+ return OutputName
+
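+ # Layout of the generated <Name>Offset.bin, as written above: one record per
+ # entry, each a 16-byte marker GUID (UNI-string or VFR GUID) followed by the
+ # offset packed with struct.pack('Q', ...) as an 8-byte unsigned integer.
+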
+ @cached_property
+ def OutputFile(self):
+ retVal = set()
+
+ for Root, Dirs, Files in os.walk(self.BuildDir):
+ for File in Files:
+ # Skip .obj and .debug files; the lib file is already added through CodaTargetList above
+ if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
+ NewFile = path.join(Root, File)
+ retVal.add(NewFile)
+
+ for Root, Dirs, Files in os.walk(self.FfsOutputDir):
+ for File in Files:
+ NewFile = path.join(Root, File)
+ retVal.add(NewFile)
+
+ return retVal
+
+ ## Create the As-Built INF file for the module
+ #
+ def CreateAsBuiltInf(self):
+
+ if self.IsAsBuiltInfCreated:
+ return
+
+ # Skip INF file generation for libraries
+ if self.IsLibrary:
+ return
+
+ # Skip the following code for modules with no source files
+ if not self.SourceFileList:
+ return
+
+ # Skip the following code for modules that carry binary files
+ if self.BinaryFileList:
+ return
+
+ ### TODO: How to handle mixed source and binary modules
+
+ # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
+ # Also find all packages that the DynamicEx PCDs depend on
+ Pcds = []
+ PatchablePcds = []
+ Packages = []
+ PcdCheckList = []
+ PcdTokenSpaceList = []
+ for Pcd in self.ModulePcdList + self.LibraryPcdList:
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PatchablePcds.append(Pcd)
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
+ elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if Pcd not in Pcds:
+ Pcds.append(Pcd)
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
+ PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
+ GuidList = OrderedDict(self.GuidList)
+ for TokenSpace in self.GetGuidsUsedByPcd:
+ # If a token space is not referred to by a patchable or Ex PCD, remove its GUID from the GUID list.
+ # The GUIDs in the GUIDs section should be only those in the source INF or referred to by Ex and patchable PCDs.
+ if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
+ GuidList.pop(TokenSpace)
+ CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
+ for Package in self.DerivedPackageList:
+ if Package in Packages:
+ continue
+ BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
+ Found = False
+ for Index in range(len(BeChecked)):
+ for Item in CheckList[Index]:
+ if Item in BeChecked[Index]:
+ Packages.append(Package)
+ Found = True
+ break
+ if Found:
+ break
+
+ VfrPcds = self._GetPcdsMaybeUsedByVfr()
+ for Pkg in self.PlatformInfo.PackageList:
+ if Pkg in Packages:
+ continue
+ for VfrPcd in VfrPcds:
+ if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
+ (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
+ Packages.append(Pkg)
+ break
+
+ ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
+ DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
+ Guid = self.Guid
+ MDefs = self.Module.Defines
+
+ AsBuiltInfDict = {
+ 'module_name' : self.Name,
+ 'module_guid' : Guid,
+ 'module_module_type' : ModuleType,
+ 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
+ 'pcd_is_driver_string' : [],
+ 'module_uefi_specification_version' : [],
+ 'module_pi_specification_version' : [],
+ 'module_entry_point' : self.Module.ModuleEntryPointList,
+ 'module_unload_image' : self.Module.ModuleUnloadImageList,
+ 'module_constructor' : self.Module.ConstructorList,
+ 'module_destructor' : self.Module.DestructorList,
+ 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
+ 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
+ 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
+ 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
+ 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
+ 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
+ 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
+ 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
+ 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
+ 'module_arch' : self.Arch,
+ 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
+ 'binary_item' : [],
+ 'patchablepcd_item' : [],
+ 'pcd_item' : [],
+ 'protocol_item' : [],
+ 'ppi_item' : [],
+ 'guid_item' : [],
+ 'flags_item' : [],
+ 'libraryclasses_item' : []
+ }
+
+ if 'MODULE_UNI_FILE' in MDefs:
+ UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
+ if os.path.isfile(UNIFile):
+ shutil.copy2(UNIFile, self.OutputDir)
+
+ if self.AutoGenVersion > int(gInfSpecVersion, 0):
+ AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
+ else:
+ AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
+
+ if DriverType:
+ AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)
+
+ if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
+ AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
+ if 'PI_SPECIFICATION_VERSION' in self.Specification:
+ AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])
+
+ OutputDir = self.OutputDir.replace('\\', '/').strip('/')
+ DebugDir = self.DebugDir.replace('\\', '/').strip('/')
+ for Item in self.CodaTargetList:
+ File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
+ if os.path.isabs(File):
+ File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
+ if Item.Target.Ext.lower() == '.aml':
+ AsBuiltInfDict['binary_item'].append('ASL|' + File)
+ elif Item.Target.Ext.lower() == '.acpi':
+ AsBuiltInfDict['binary_item'].append('ACPI|' + File)
+ elif Item.Target.Ext.lower() == '.efi':
+ AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
+ else:
+ AsBuiltInfDict['binary_item'].append('BIN|' + File)
+ if not self.DepexGenerated:
+ DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
+ if os.path.exists(DepexFile):
+ self.DepexGenerated = True
+ if self.DepexGenerated:
+ if self.ModuleType in [SUP_MODULE_PEIM]:
+ AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
+ elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
+ AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
+ elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
+ AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')
+
+ Bin = self._GenOffsetBin()
+ if Bin:
+ AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)
+
+ for Root, Dirs, Files in os.walk(OutputDir):
+ for File in Files:
+ if File.lower().endswith('.pdb'):
+ AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
+ HeaderComments = self.Module.HeaderComments
+ StartPos = 0
+ for Index in range(len(HeaderComments)):
+ if HeaderComments[Index].find('@BinaryHeader') != -1:
+ HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
+ StartPos = Index
+ break
+ AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
+ AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
+
+ GenList = [
+ (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
+ (self.PpiList, self._PpiComments, 'ppi_item'),
+ (GuidList, self._GuidComments, 'guid_item')
+ ]
+ for Item in GenList:
+ for CName in Item[0]:
+ Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
+ Entry = Comments + '\n ' + CName if Comments else CName
+ AsBuiltInfDict[Item[2]].append(Entry)
+ PatchList = parsePcdInfoFromMapFile(
+ os.path.join(self.OutputDir, self.Name + '.map'),
+ os.path.join(self.OutputDir, self.Name + '.efi')
+ )
+ if PatchList:
+ for Pcd in PatchablePcds:
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ for PatchPcd in PatchList:
+ if TokenCName == PatchPcd[0]:
+ break
+ else:
+ continue
+ PcdValue = ''
+ if Pcd.DatumType == 'BOOLEAN':
+ BoolValue = Pcd.DefaultValue.upper()
+ if BoolValue == 'TRUE':
+ Pcd.DefaultValue = '1'
+ elif BoolValue == 'FALSE':
+ Pcd.DefaultValue = '0'
+
+ if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ HexFormat = '0x%02x'
+ if Pcd.DatumType == TAB_UINT16:
+ HexFormat = '0x%04x'
+ elif Pcd.DatumType == TAB_UINT32:
+ HexFormat = '0x%08x'
+ elif Pcd.DatumType == TAB_UINT64:
+ HexFormat = '0x%016x'
+ PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
+ else:
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
+ )
+ ArraySize = int(Pcd.MaxDatumSize, 0)
+ PcdValue = Pcd.DefaultValue
+ if PcdValue[0] != '{':
+ Unicode = False
+ if PcdValue[0] == 'L':
+ Unicode = True
+ PcdValue = PcdValue.lstrip('L')
+ PcdValue = eval(PcdValue)
+ NewValue = '{'
+ for Index in range(0, len(PcdValue)):
+ if Unicode:
+ CharVal = ord(PcdValue[Index])
+ NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
+ + '0x%02x' % (CharVal >> 8) + ', '
+ else:
+ NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
+ Padding = '0x00, '
+ if Unicode:
+ Padding = Padding * 2
+ ArraySize = ArraySize // 2
+ if ArraySize < (len(PcdValue) + 1):
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
+ )
+ else:
+ ArraySize = len(PcdValue) + 1
+ if ArraySize > len(PcdValue) + 1:
+ NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
+ PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
+ elif len(PcdValue.split(',')) <= ArraySize:
+ PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
+ PcdValue += '}'
+ else:
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
+ )
+ else:
+ ArraySize = len(PcdValue) + 1
+ PcdItem = '%s.%s|%s|0x%X' % \
+ (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
+ PcdComments = ''
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
+ PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
+ if PcdComments:
+ PcdItem = PcdComments + '\n ' + PcdItem
+ AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
+
+ for Pcd in Pcds + VfrPcds:
+ PcdCommentList = []
+ HiiInfo = ''
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
+ for SkuName in Pcd.SkuInfoList:
+ SkuInfo = Pcd.SkuInfoList[SkuName]
+ HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
+ break
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
+ PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
+ if HiiInfo:
+ UsageIndex = -1
+ UsageStr = ''
+ for Index, Comment in enumerate(PcdCommentList):
+ for Usage in UsageList:
+ if Comment.find(Usage) != -1:
+ UsageStr = Usage
+ UsageIndex = Index
+ break
+ if UsageIndex != -1:
+ PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
+ else:
+ PcdCommentList.append('## UNDEFINED ' + HiiInfo)
+ PcdComments = '\n '.join(PcdCommentList)
+ PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
+ if PcdComments:
+ PcdEntry = PcdComments + '\n ' + PcdEntry
+ AsBuiltInfDict['pcd_item'].append(PcdEntry)
+ for Item in self.BuildOption:
+ if 'FLAGS' in self.BuildOption[Item]:
+ AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))
+
+ # Generate the LibraryClasses section in comments.
+ for Library in self.LibraryAutoGenList:
+ AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
+
+ # Generate the UserExtensions TianoCore section.
+ # All TianoCore user extensions are copied.
+ UserExtStr = ''
+ for TianoCore in self._GetTianoCoreUserExtensionList():
+ UserExtStr += '\n'.join(TianoCore)
+ ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
+ if os.path.isfile(ExtensionFile):
+ shutil.copy2(ExtensionFile, self.OutputDir)
+ AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr
+
+ # Generate the depex expression section in comments.
+ DepexExpression = self._GetDepexExpressionString()
+ AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''
+
+ AsBuiltInf = TemplateString()
+ AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
+
+ SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
+
+ self.IsAsBuiltInfCreated = True
+
+ def CacheCopyFile(self, DestDir, SourceDir, File):
+ if os.path.isdir(File):
+ return
+
+ sub_dir = os.path.relpath(File, SourceDir)
+ destination_file = os.path.join(DestDir, sub_dir)
+ destination_dir = os.path.dirname(destination_file)
+ CreateDirectory(destination_dir)
+ try:
+ CopyFileOnChange(File, destination_dir)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
+ return
+
+ def CopyModuleToCache(self):
+ # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList
+ # and PreMakeHashFileList files
+ MakeHashStr = None
+ PreMakeHashStr = None
+ MakeTimeStamp = 0
+ PreMakeTimeStamp = 0
+ Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
+ for File in Files:
+ if ".MakeHashFileList." in File:
+ # find the latest file by time stamp
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
+ if FileTimeStamp > MakeTimeStamp:
+ MakeTimeStamp = FileTimeStamp
+ MakeHashStr = File.split('.')[-1]
+ if len(MakeHashStr) != 32:
+ EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
+ if ".PreMakeHashFileList." in File:
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
+ if FileTimeStamp > PreMakeTimeStamp:
+ PreMakeTimeStamp = FileTimeStamp
+ PreMakeHashStr = File.split('.')[-1]
+ if len(PreMakeHashStr) != 32:
+ EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))
+
+ if not MakeHashStr:
+ EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
+ return
+ if not PreMakeHashStr:
+ EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
+ return
+
+ # Create Cache destination dirs
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+ FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+ CacheFileDir = path.join(FileDir, MakeHashStr)
+ CacheFfsDir = path.join(FfsDir, MakeHashStr)
+ CreateDirectory (CacheFileDir)
+ CreateDirectory (CacheFfsDir)
+
+ # Create ModuleHashPair file to support multiple version cache together
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+ if os.path.exists(ModuleHashPair):
+ with open(ModuleHashPair, 'r') as f:
+ ModuleHashPairList = json.load(f)
+ if (PreMakeHashStr, MakeHashStr) not in set(map(tuple, ModuleHashPairList)):
+ ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
+ with open(ModuleHashPair, 'w') as f:
+ json.dump(ModuleHashPairList, f, indent=2)
+
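+ # The resulting <Name>.ModuleHashPair file is a JSON list of
+ # (PreMakefileHash, MakeHash) pairs, newest first, e.g. with made-up digests:
+ # [["0f343b0931126a20f133d67c2b018a3b", "a94a8fe5ccb19ba61c4c0873d391e987"]]
+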
+ # Copy files to Cache destination dirs
+ if not self.OutputFile:
+ Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+ self.OutputFile = Ma.Binaries
+ for File in self.OutputFile:
+ if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
+ self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
+ else:
+ if self.Name + ".autogen.hash." in File or \
+ self.Name + ".autogen.hashchain." in File or \
+ self.Name + ".hash." in File or \
+ self.Name + ".hashchain." in File or \
+ self.Name + ".PreMakeHashFileList." in File or \
+ self.Name + ".MakeHashFileList." in File:
+ self.CacheCopyFile(FileDir, self.BuildDir, File)
+ else:
+ self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
+ ## Create makefile for the module and its dependent libraries
+ #
+ # @param CreateLibraryMakeFile Flag indicating whether the makefiles of
+ # dependent libraries will be created
+ #
+ @cached_class_function
+ def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
+
+ # Nest this function inside its only caller.
+ def CreateTimeStamp():
+ FileSet = {self.MetaFile.Path}
+
+ for SourceFile in self.Module.Sources:
+ FileSet.add (SourceFile.Path)
+
+ for Lib in self.DependentLibraryList:
+ FileSet.add (Lib.MetaFile.Path)
+
+ for f in self.AutoGenDepSet:
+ FileSet.add (f.Path)
+
+ if os.path.exists (self.TimeStampPath):
+ os.remove (self.TimeStampPath)
+
+ SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
+
+ # Ignore generating makefile when it is a binary module
+ if self.IsBinaryModule:
+ return
+
+ self.GenFfsList = GenFfsList
+
+ if not self.IsLibrary and CreateLibraryMakeFile:
+ for LibraryAutoGen in self.LibraryAutoGenList:
+ LibraryAutoGen.CreateMakeFile()
+
+ # CanSkip uses timestamps to determine build skipping
+ if self.CanSkip():
+ return
+
+ if len(self.CustomMakefile) == 0:
+ Makefile = GenMake.ModuleMakefile(self)
+ else:
+ Makefile = GenMake.CustomMakefile(self)
+ if Makefile.Generate():
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
+ (self.Name, self.Arch))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
+ (self.Name, self.Arch))
+
+ CreateTimeStamp()
+
+ MakefileType = Makefile._FileType
+ MakefileName = Makefile._FILE_NAME_[MakefileType]
+ MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
+ FilePath = path.join(self.BuildDir, self.Name + ".makefile")
+ SaveFileOnChange(FilePath, MakefilePath, False)
+
+ def CopyBinaryFiles(self):
+ for File in self.Module.Binaries:
+ SrcPath = File.Path
+ DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
+ CopyLongFilePath(SrcPath, DstPath)
+ ## Create autogen code for the module and its dependent libraries
+ #
+ # @param CreateLibraryCodeFile Flag indicating whether the code of
+ # dependent libraries will be created
+ #
+ def CreateCodeFile(self, CreateLibraryCodeFile=True):
+
+ if self.IsCodeFileCreated:
+ return
+
+ # Need to generate the PCD database even if the PCD driver is a binary module
+ if self.IsBinaryModule and self.PcdIsDriver != '':
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+ return
+ if self.IsBinaryModule:
+ if self.IsLibrary:
+ self.CopyBinaryFiles()
+ return
+
+ if not self.IsLibrary and CreateLibraryCodeFile:
+ for LibraryAutoGen in self.LibraryAutoGenList:
+ LibraryAutoGen.CreateCodeFile()
+
+ self.LibraryAutoGenList
+ AutoGenList = []
+ IgnoredAutoGenList = []
+
+ for File in self.AutoGenFileList:
+ if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
+ AutoGenList.append(str(File))
+ else:
+ IgnoredAutoGenList.append(str(File))
+
+
+ for ModuleType in self.DepexList:
+ # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
+ if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
+ continue
+
+ Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
+ DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
+
+ if len(Dpx.PostfixNotation) != 0:
+ self.DepexGenerated = True
+
+ if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
+ AutoGenList.append(str(DpxFile))
+ else:
+ IgnoredAutoGenList.append(str(DpxFile))
+
+ if IgnoredAutoGenList == []:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
+ (" ".join(AutoGenList), self.Name, self.Arch))
+ elif AutoGenList == []:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
+ (" ".join(IgoredAutoGenList), self.Name, self.Arch))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
+ (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
+
+ self.IsCodeFileCreated = True
+
+ return AutoGenList
+
+ ## Summarize the ModuleAutoGen objects of all libraries used by this module
+ @cached_property
+ def LibraryAutoGenList(self):
+ RetVal = []
+ for Library in self.DependentLibraryList:
+ La = ModuleAutoGen(
+ self.Workspace,
+ Library.MetaFile,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Arch,
+ self.PlatformInfo.MetaFile,
+ self.DataPipe
+ )
+ La.IsLibrary = True
+ if La not in RetVal:
+ RetVal.append(La)
+ for Lib in La.CodaTargetList:
+ self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
+ return RetVal
+
+ def GenCMakeHash(self):
+ # GenCMakeHash can only be called with --binary-destination.
+ # It is never called in multiprocessing and always saves its result directly in the main process,
+ # so there is no need for a remote dict to share the gCMakeHashFile result with the main process
+
+ DependencyFileSet = set()
+ # Add AutoGen files
+ if self.AutoGenFileList:
+ for File in set(self.AutoGenFileList):
+ DependencyFileSet.add(File)
+
+ # Add Makefile
+ abspath = path.join(self.BuildDir, self.Name + ".makefile")
+ lines = None
+ try:
+ with open(LongFilePath(abspath),"r") as fd:
+ lines = fd.readlines()
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ if lines:
+ DependencyFileSet.update(lines)
+
+ # Calculate the hash of all the dependency files above
+ # Initialize the hash object
+ FileList = []
+ m = hashlib.md5()
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):
+ if not path.exists(LongFilePath(str(File))):
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
+ continue
+ with open(LongFilePath(str(File)), 'rb') as f:
+ Content = f.read()
+ m.update(Content)
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))
+
+ HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())
+ GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ try:
+ with open(LongFilePath(HashChainFile), 'w') as f:
+ json.dump(FileList, f, indent=2)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
+ return False
+
+ def GenModuleHash(self):
+ # GenModuleHash is only called after the autogen phase.
+ # It is never called in multiprocessing and always saves its result directly in the main process,
+ # so there is no need for a remote dict to share the gModuleHashFile result with the main process.
+ #
+ # GenPreMakefileHashList consumes no dict.
+ # GenPreMakefileHashList produces the local gModuleHashFile dict.
+
+ DependencyFileSet = set()
+ # Add Module Meta file
+ DependencyFileSet.add(self.MetaFile.Path)
+
+ # Add Module's source files
+ if self.SourceFileList:
+ for File in set(self.SourceFileList):
+ DependencyFileSet.add(File.Path)
+
+ # Add the module's include header files
+ # Directly use the deps.txt file in the module BuildDir
+ abspath = path.join(self.BuildDir, "deps.txt")
+ rt = None
+ try:
+ with open(LongFilePath(abspath),"r") as fd:
+ lines = fd.readlines()
+ if lines:
+ rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+
+ if rt:
+ DependencyFileSet.update(rt)
+
+
+ # Calculate the hash of all the dependency files collected above
+ # Initialize the hash object
+ FileList = []
+ m = hashlib.md5()
+ BuildDirStr = path.abspath(self.BuildDir).lower()
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):
+ # Skip the AutoGen files in BuildDir, which have already been
+ # included in the .autogen.hashchain. file
+ if BuildDirStr in path.abspath(File).lower():
+ continue
+ if not path.exists(LongFilePath(File)):
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
+ continue
+ with open(LongFilePath(File), 'rb') as f:
+ Content = f.read()
+ m.update(Content)
+ FileList.append((File, hashlib.md5(Content).hexdigest()))
+
+ HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ try:
+ with open(LongFilePath(HashChainFile), 'w') as f:
+ json.dump(FileList, f, indent=2)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
+ return False
+
+ def GenPreMakefileHashList(self):
+ # GenPreMakefileHashList consumes the following dicts:
+ #     gPlatformHashFile
+ #     gPackageHashFile
+ #     gModuleHashFile
+ # GenPreMakefileHashList produces no dict.
+ # gModuleHashFile items might be produced by worker processes, so the
+ # remote gModuleHashFile dict needs to be checked.
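+ #
+ # Illustrative example (hypothetical paths): the resulting
+ # <Name>.PreMakeHashFileList.<md5> file holds a JSON list of the hash
+ # file paths that were combined, e.g.
+ #     ["/ws/Build/.../PlatformName.dsc.hash",
+ #      "/ws/Build/.../PcdPeim.hashchain.1a2b...ff"]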
+
+ # skip binary module
+ if self.IsBinaryModule:
+ return
+
+ FileList = []
+ m = hashlib.md5()
+ # Add Platform level hash
+ HashFile = GlobalData.gPlatformHashFile
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)
+
+ # Add Package level hash
+ if self.DependentPackageList:
+ for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
+ if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:
+ EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
+ continue
+ HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)
+
+ # Add the module's own hash
+ # GenPreMakefileHashList is needed for both --binary-destination
+ # and --hash. With --hash, the ModuleHashFile might be saved in the
+ # remote dict during multiprocessing.
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
+ HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
+ else:
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
+
+ # Add Library hash
+ if self.LibraryAutoGenList:
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
+
+ if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
+ HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
+ else:
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
+
+ # Save PreMakeHashFileList
+ FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
+ try:
+ with open(LongFilePath(FilePath), 'w') as f:
+ json.dump(FileList, f, indent=0)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
+
+ def GenMakefileHashList(self):
+ # GenMakefileHashList is only needed for --binary-destination, which
+ # keeps everything in local dicts, so no remote dict check is needed.
+
+ # skip binary module
+ if self.IsBinaryModule:
+ return
+
+ FileList = []
+ m = hashlib.md5()
+ # Add AutoGen hash
+ HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)
+
+ # Add the module's own hash
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
+ HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
+ else:
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
+
+ # Add Library hash
+ if self.LibraryAutoGenList:
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
+ if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
+ HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
+ else:
+ EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
+
+ # Save MakeHashFileList
+ FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
+ try:
+ with open(LongFilePath(FilePath), 'w') as f:
+ json.dump(FileList, f, indent=0)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
+
+ def CheckHashChainFile(self, HashChainFile):
+ # Assume the HashChainFile basename has the format
+ # '<ModuleName>.hashchain.<32HexChars>', where the 32 hex characters are
+ # the md5 hexdigest of the content of all the dependency files.
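+ #
+ # Illustrative example (hypothetical values): a file named
+ # 'PcdPeim.hashchain.0123456789abcdef0123456789abcdef' is expected to
+ # contain JSON such as
+ #     [["/ws/MdeModulePkg/Universal/PCD/Pei/Pcd.inf", "d41d8cd9..."],
+ #      ["/ws/MdeModulePkg/Universal/PCD/Pei/Pcd.c", "9e107d9d..."]]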
+ HashStr = HashChainFile.split('.')[-1]
+ if len(HashStr) != 32:
+ EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % HashChainFile)
+ return False
+
+ try:
+ with open(LongFilePath(HashChainFile), 'r') as f:
+ HashChainList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
+ return False
+
+ # Compare each recorded hash against the current content of the file
+ for idx, (SrcFile, SrcHash) in enumerate (HashChainList):
+ if SrcFile in GlobalData.gFileHashDict:
+ DestHash = GlobalData.gFileHashDict[SrcFile]
+ else:
+ try:
+ with open(LongFilePath(SrcFile), 'rb') as f:
+ Content = f.read()
+ DestHash = hashlib.md5(Content).hexdigest()
+ GlobalData.gFileHashDict[SrcFile] = DestHash
+ except IOError as X:
+ # cache miss if SrcFile is removed in new version code
+ GlobalData.gFileHashDict[SrcFile] = 0
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
+ return False
+ if SrcHash != DestHash:
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
+ return False
+
+ return True
+
+ ## Decide whether the remaining autogen and make steps can be skipped
+ def CanSkipbyMakeCache(self):
+ # For --binary-source only.
+ # CanSkipbyMakeCache consumes the following dicts:
+ #     gModuleMakeCacheStatus
+ #     gHashChainStatus
+ # CanSkipbyMakeCache produces the gModuleMakeCacheStatus and
+ # gModuleHashFile dicts. All of these dicts might be produced by worker
+ # processes, so the remote dicts need to be checked.
+
+ if not GlobalData.gBinCacheSource:
+ return False
+
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
+ return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
+
+ # A binary module has special build rules, so do not skip it via the cache.
+ if self.IsBinaryModule:
+ print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ # Treat .inc files as binary files; do not skip by hash
+ for f_ext in self.SourceFileList:
+ if '.inc' in str(f_ext):
+ print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
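+ # Illustrative example (hypothetical digests): the JSON payload looks
+ # like [["9b2f0d...", "4c1de2..."]], pairing each PreMakefile hash with
+ # the Make hash of the corresponding cached build result.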
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
+ try:
+ with open(LongFilePath(ModuleHashPair), 'r') as f:
+ ModuleHashPairList = json.load(f)
+ except:
+ # ModuleHashPair might not exist for a newly added module
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
+ return False
+
+ # Check each (PreMakefileHash, MakeHash) pair in ModuleHashPairList
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
+
+ try:
+ with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:
+ MakeHashFileList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
+ continue
+
+ HashMiss = False
+ for HashChainFile in MakeHashFileList:
+ HashChainStatus = None
+ if HashChainFile in GlobalData.gHashChainStatus:
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+ if HashChainStatus == False:
+ HashMiss = True
+ break
+ elif HashChainStatus == True:
+ continue
+ # Convert to a path that starts with the cache source dir
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
+ if self.CheckHashChainFile(NewFilePath):
+ GlobalData.gHashChainStatus[HashChainFile] = True
+ # Save the module's own HashFile for later use by GenPreMakefileHashList
+ if self.Name + ".hashchain." in HashChainFile:
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ else:
+ GlobalData.gHashChainStatus[HashChainFile] = False
+ HashMiss = True
+ break
+
+ if HashMiss:
+ continue
+
+ # PreMakefile cache hit, restore the module build result
+ for root, dir, files in os.walk(SourceHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
+ if os.path.exists(SourceFfsHashDir):
+ for root, dir, files in os.walk(SourceFfsHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
+
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+
+ print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+ return True
+
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ ## Decide whether the remaining autogen and make steps can be skipped
+ def CanSkipbyPreMakeCache(self):
+ # CanSkipbyPreMakeCache consumes the following dicts:
+ #     gModulePreMakeCacheStatus
+ #     gHashChainStatus
+ #     gModuleHashFile
+ # CanSkipbyPreMakeCache produces the gModulePreMakeCacheStatus dict.
+ # All of these dicts might be produced by worker processes, so the
+ # remote dicts need to be checked.
+
+ if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
+ return False
+
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
+ return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
+
+ # A binary module has special build rules, so do not skip it via the cache.
+ if self.IsBinaryModule:
+ print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ # Treat .inc files as binary files; do not skip by hash
+ for f_ext in self.SourceFileList:
+ if '.inc' in str(f_ext):
+ print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ # For --hash only, in an incremental build
+ if not GlobalData.gBinCacheSource:
+ Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
+ PreMakeHashFileList_FilePah = None
+ MakeTimeStamp = 0
+ # Find latest PreMakeHashFileList file in self.BuildDir folder
+ for File in Files:
+ if ".PreMakeHashFileList." in File:
+ FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
+ if FileTimeStamp > MakeTimeStamp:
+ MakeTimeStamp = FileTimeStamp
+ PreMakeHashFileList_FilePah = File
+ if not PreMakeHashFileList_FilePah:
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ try:
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
+ PreMakeHashFileList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ HashMiss = False
+ for HashChainFile in PreMakeHashFileList:
+ HashChainStatus = None
+ if HashChainFile in GlobalData.gHashChainStatus:
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+ if HashChainStatus == False:
+ HashMiss = True
+ break
+ elif HashChainStatus == True:
+ continue
+ if self.CheckHashChainFile(HashChainFile):
+ GlobalData.gHashChainStatus[HashChainFile] = True
+ # Save the module's own HashFile for later use by GenPreMakefileHashList
+ if self.Name + ".hashchain." in HashChainFile:
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ else:
+ GlobalData.gHashChainStatus[HashChainFile] = False
+ HashMiss = True
+ break
+
+ if HashMiss:
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+ else:
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+ return True
+
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
+ try:
+ with open(LongFilePath(ModuleHashPair), 'r') as f:
+ ModuleHashPairList = json.load(f)
+ except:
+ # ModuleHashPair might not exist for a newly added module
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ return False
+
+ # Check each (PreMakefileHash, MakeHash) pair in ModuleHashPairList
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)
+ PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
+ MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
+
+ try:
+ with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
+ PreMakeHashFileList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
+ continue
+
+ HashMiss = False
+ for HashChainFile in PreMakeHashFileList:
+ HashChainStatus = None
+ if HashChainFile in GlobalData.gHashChainStatus:
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+ if HashChainStatus == False:
+ HashMiss = True
+ break
+ elif HashChainStatus == True:
+ continue
+ # Convert to a path that starts with the cache source dir
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
+ if self.CheckHashChainFile(NewFilePath):
+ GlobalData.gHashChainStatus[HashChainFile] = True
+ else:
+ GlobalData.gHashChainStatus[HashChainFile] = False
+ HashMiss = True
+ break
+
+ if HashMiss:
+ continue
+
+ # PreMakefile cache hit, restore the module build result
+ for root, dir, files in os.walk(SourceHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
+ if os.path.exists(SourceFfsHashDir):
+ for root, dir, files in os.walk(SourceFfsHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
+
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+ return True
+
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ ## Decide whether we can skip the Module build
+ def CanSkipbyCache(self, gHitSet):
+ # Hashing feature is off
+ if not GlobalData.gBinCacheSource:
+ return False
+
+ if self in gHitSet:
+ return True
+
+ return False
+
+ ## Decide whether the ModuleAutoGen process can be skipped
+ # If any source file is newer than the module's timestamp, then we cannot skip
+ #
+ def CanSkip(self):
+ # Don't skip if any cache feature is enabled
+ if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
+ return False
+ if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
+ return True
+ if not os.path.exists(self.TimeStampPath):
+ return False
+ # Last creation time of the module
+ DstTimeStamp = os.stat(self.TimeStampPath)[8]
+
+ SrcTimeStamp = self.Workspace._SrcTimeStamp
+ if SrcTimeStamp > DstTimeStamp:
+ return False
+
+ with open(self.TimeStampPath,'r') as f:
+ for source in f:
+ source = source.rstrip('\n')
+ if not os.path.exists(source):
+ return False
+ if source not in ModuleAutoGen.TimeDict:
+ ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
+ if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
+ return False
+ GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
+ return True
+
+ @cached_property
+ def TimeStampPath(self):
+ return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
new file mode 100755
index 00000000..09168047
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
@@ -0,0 +1,674 @@
+## @file
+# Pruned AutoGen helper classes used by ModuleAutoGen in worker processes
+#
+# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from Workspace.WorkspaceDatabase import WorkspaceDatabase,BuildDB
+from Common.caching import cached_property
+from AutoGen.BuildEngine import BuildRule,AutoGenReqBuildRuleVerNum
+from AutoGen.AutoGen import CalculatePriorityValue
+from Common.Misc import CheckPcdDatum,GuidValue
+from Common.Expression import ValueExpressionEx
+from Common.DataType import *
+from CommonDataClass.Exceptions import *
+from CommonDataClass.CommonClass import SkuInfoClass
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import OPTION_CONFLICT,FORMAT_INVALID,RESOURCE_NOT_AVAILABLE
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from collections import defaultdict
+from Common.Misc import PathClass
+import os
+
+
+#
+# The priority list used when overriding build options
+#
+PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
+ "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE
+ "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE
+ "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
+ "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
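+#
+# Illustrative example: for the build-option key "DEBUG_GCC5_X64_CC_FLAGS"
+# every field is explicit, so the mask is "0x11111" and the priority value
+# is 16; for "*_*_*_CC_FLAGS" only the command type and attribute are
+# explicit, giving "0x00011" and priority 9 (see CalculatePriorityValue).
+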
+## Base class for AutoGen
+#
+# This class just implements the cache mechanism of AutoGen objects.
+#
+class AutoGenInfo(object):
+ # database to maintain the objects in each child class
+ __ObjectCache = {} # (BuildTarget, ToolChain, ARCH, platform file): AutoGen object
+
+ ## Factory method
+ #
+ # @param Class class object of real AutoGen class
+ # (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
+ # @param Workspace Workspace directory or WorkspaceAutoGen object
+ # @param MetaFile The path of meta file
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param Arch Target arch
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+ @classmethod
+ def GetCache(cls):
+ return cls.__ObjectCache
+ def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ # check if the object has been created
+ Key = (Target, Toolchain, Arch, MetaFile)
+ if Key in cls.__ObjectCache:
+ # if it exists, just return it directly
+ return cls.__ObjectCache[Key]
+ # it didn't exist: create it, cache it, then return it
+ RetVal = cls.__ObjectCache[Key] = super(AutoGenInfo, cls).__new__(cls)
+ return RetVal
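+ # Illustrative note (hypothetical arguments): thanks to this cache,
+ #     PlatformInfo(Wa, Dsc, "DEBUG", "GCC5", "X64", Pipe) is \
+ #     PlatformInfo(Wa, Dsc, "DEBUG", "GCC5", "X64", Pipe)
+ # evaluates to True -- both calls return the same cached object.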
+
+
+ ## hash() operator
+ #
+ # The file path of platform file will be used to represent hash value of this object
+ #
+ # @retval int Hash value of the file path of platform file
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+ ## str() operator
+ #
+ # The file path of platform file will be used to represent this object
+ #
+ # @retval string String of platform file path
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## "==" operator
+ def __eq__(self, Other):
+ return Other and self.MetaFile == Other
+
+ ## Expand * in build option key
+ #
+ # @param Options Options to be expanded
+ # @param ToolDef Use specified ToolDef instead of full version.
+ # This is needed during initialization to prevent
+ # infinite recursion between BuildOptions,
+ # ToolDefinition, and this function.
+ #
+ # @retval options Options expanded
+ #
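+ # Illustrative example (hypothetical values): with BuildTarget "DEBUG",
+ # ToolChain "GCC5" and Arch "X64", an Options entry such as
+ #     ("GCC", "*_*_*_CC_FLAGS"): "-DMDEPKG_NDEBUG"
+ # matches every wildcard and ends up in the result as
+ #     BuildOptions["CC"]["FLAGS"]
+ # FLAGS values are appended across matches unless a value starts with
+ # '=', which replaces whatever was accumulated so far.
+ #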
+ def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
+ if not ToolDef:
+ ToolDef = self.ToolDefinition
+ BuildOptions = {}
+ FamilyMatch = False
+ FamilyIsNull = True
+
+ OverrideList = {}
+ #
+ # Construct a list containing the build options that need to be overridden.
+ #
+ for Key in Options:
+ #
+ # Key[0] -- tool family
+ # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ #
+ if (Key[0] == self.BuildRuleFamily and
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
+ Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
+ if (Target == self.BuildTarget or Target == TAB_STAR) and\
+ (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
+ (Arch == self.Arch or Arch == TAB_STAR) and\
+ Options[Key].startswith("="):
+
+ if OverrideList.get(Key[1]) is not None:
+ OverrideList.pop(Key[1])
+ OverrideList[Key[1]] = Options[Key]
+
+ #
+ # Use the highest priority value.
+ #
+ if (len(OverrideList) >= 2):
+ KeyList = list(OverrideList.keys())
+ for Index in range(len(KeyList)):
+ NowKey = KeyList[Index]
+ Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
+ for Index1 in range(len(KeyList) - Index - 1):
+ NextKey = KeyList[Index1 + Index + 1]
+ #
+ # Compare two keys; if one includes the other, keep the higher-priority one
+ #
+ Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
+ if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
+ (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
+ (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
+ (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
+ (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):
+
+ if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NextKey))
+ else:
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NowKey))
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+ # Check whether the module style is EDK or EDKII.
+ # Only append build options for modules of the matching style.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family != "":
+ Found = False
+ if Tool in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if TAB_STAR in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if not Found:
+ continue
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+ # The build option family has been checked and needn't be checked again.
+ if FamilyMatch or FamilyIsNull:
+ return BuildOptions
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+ # Check whether the module style is EDK or EDKII.
+ # Only append build options for modules of the matching style.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family == "":
+ continue
+ # option has been added before
+ Found = False
+ if Tool in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if TAB_STAR in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if not Found:
+ continue
+
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+ return BuildOptions
+#
+# This class is the pruned WorkspaceAutoGen used by ModuleAutoGen in worker threads
+#
+class WorkSpaceInfo(AutoGenInfo):
+ def __init__(self,Workspace, MetaFile, Target, ToolChain, Arch):
+ if not hasattr(self, "_Init"):
+ self.do_init(Workspace, MetaFile, Target, ToolChain, Arch)
+ self._Init = True
+ def do_init(self,Workspace, MetaFile, Target, ToolChain, Arch):
+ self._SrcTimeStamp = 0
+ self.Db = BuildDB
+ self.BuildDatabase = self.Db.BuildObject
+ self.Target = Target
+ self.ToolChain = ToolChain
+ self.WorkspaceDir = Workspace
+ self.ActivePlatform = MetaFile
+ self.ArchList = Arch
+ self.AutoGenObjectList = []
+ @property
+ def BuildDir(self):
+ return self.AutoGenObjectList[0].BuildDir
+
+ @property
+ def Name(self):
+ return self.AutoGenObjectList[0].Platform.PlatformName
+
+ @property
+ def FlashDefinition(self):
+ return self.AutoGenObjectList[0].Platform.FlashDefinition
+ @property
+ def GenFdsCommandDict(self):
+ FdsCommandDict = self.AutoGenObjectList[0].DataPipe.Get("FdsCommandDict")
+ if FdsCommandDict:
+ return FdsCommandDict
+ return {}
+
+ @cached_property
+ def FvDir(self):
+ return os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
+
+class PlatformInfo(AutoGenInfo):
+ def __init__(self, Workspace, MetaFile, Target, ToolChain, Arch,DataPipe):
+ if not hasattr(self, "_Init"):
+ self.do_init(Workspace, MetaFile, Target, ToolChain, Arch,DataPipe)
+ self._Init = True
+ def do_init(self,Workspace, MetaFile, Target, ToolChain, Arch,DataPipe):
+ self.Wa = Workspace
+ self.WorkspaceDir = self.Wa.WorkspaceDir
+ self.MetaFile = MetaFile
+ self.Arch = Arch
+ self.Target = Target
+ self.BuildTarget = Target
+ self.ToolChain = ToolChain
+ self.Platform = self.Wa.BuildDatabase[self.MetaFile, self.Arch, self.Target, self.ToolChain]
+
+ self.SourceDir = MetaFile.SubDir
+ self.DataPipe = DataPipe
+ @cached_property
+ def _AsBuildModuleList(self):
+ retVal = self.DataPipe.Get("AsBuildModuleList")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ ## Test if a module is supported by the platform
+ #
+ # @retval True if the module is listed in the platform's modules,
+ #         library instances, or as-build module list; False otherwise
+ #
+ def ValidModule(self, Module):
+ return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
+ or Module in self._AsBuildModuleList
+
+ @cached_property
+ def ToolChainFamily(self):
+ retVal = self.DataPipe.Get("ToolChainFamily")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ @cached_property
+ def BuildRuleFamily(self):
+ retVal = self.DataPipe.Get("BuildRuleFamily")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ @cached_property
+ def _MbList(self):
+ return [self.Wa.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain] for m in self.Platform.Modules]
+
+ @cached_property
+ def PackageList(self):
+ RetVal = set()
+ for dec_file,Arch in self.DataPipe.Get("PackageList"):
+ RetVal.add(self.Wa.BuildDatabase[dec_file,Arch,self.BuildTarget, self.ToolChain])
+ return list(RetVal)
+
+ ## Return the directory to store all intermediate and final files built
+ @cached_property
+ def BuildDir(self):
+ if os.path.isabs(self.OutputDir):
+ RetVal = os.path.join(
+ os.path.abspath(self.OutputDir),
+ self.Target + "_" + self.ToolChain,
+ )
+ else:
+ RetVal = os.path.join(
+ self.WorkspaceDir,
+ self.OutputDir,
+ self.Target + "_" + self.ToolChain,
+ )
+ return RetVal
+
+ ## Return the build output directory that the platform specifies
+ @cached_property
+ def OutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return platform name
+ @cached_property
+ def Name(self):
+ return self.Platform.PlatformName
+
+ ## Return meta-file GUID
+ @cached_property
+ def Guid(self):
+ return self.Platform.Guid
+
+ ## Return platform version
+ @cached_property
+ def Version(self):
+ return self.Platform.Version
+
+ ## Return paths of tools
+ @cached_property
+ def ToolDefinition(self):
+ retVal = self.DataPipe.Get("TOOLDEF")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ retVal = self.DataPipe.Get("BuildCommand")
+ if retVal is None:
+ retVal = []
+ return retVal
+
+ @cached_property
+ def PcdTokenNumber(self):
+ retVal = self.DataPipe.Get("PCD_TNUM")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ ## Override PCD setting (type, value, ...)
+ #
+ # @param ToPcd The PCD to be overridden
+ # @param FromPcd The PCD overriding from
+ #
+ def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
+ #
+ # In case there are PCDs coming from the FDF file which have no type
+ # given, ToPcd.Type at this point holds the type found in the
+ # dependent package.
+ #
+ TokenCName = ToPcd.TokenCName
+ for PcdItem in self.MixedPcd:
+ if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in self.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ if FromPcd is not None:
+ if ToPcd.Pending and FromPcd.Type:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type\
+ and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
+ if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type \
+ and ToPcd.Type != FromPcd.Type:
+ if Library:
+ Module = str(Module) + " 's library file (" + str(Library) + ")"
+ EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
+ ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
+ % (ToPcd.TokenSpaceGuidCName, TokenCName,
+ ToPcd.Type, Module, FromPcd.Type, Msg),
+ File=self.MetaFile)
+
+ if FromPcd.MaxDatumSize:
+ ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
+ ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
+ if FromPcd.DefaultValue:
+ ToPcd.DefaultValue = FromPcd.DefaultValue
+ if FromPcd.TokenValue:
+ ToPcd.TokenValue = FromPcd.TokenValue
+ if FromPcd.DatumType:
+ ToPcd.DatumType = FromPcd.DatumType
+ if FromPcd.SkuInfoList:
+ ToPcd.SkuInfoList = FromPcd.SkuInfoList
+ if FromPcd.UserDefinedDefaultStoresFlag:
+ ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
+ # Parse the flexible PCD value format
+ if ToPcd.DefaultValue:
+ try:
+ ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self._GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
+ File=self.MetaFile)
+
+ # Check the validity of the datum
+ IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
+ if not IsValid:
+ EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
+ ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ ToPcd.validateranges = FromPcd.validateranges
+ ToPcd.validlists = FromPcd.validlists
+ ToPcd.expressions = FromPcd.expressions
+ ToPcd.CustomAttribute = FromPcd.CustomAttribute
+
+ if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
+ % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ Value = ToPcd.DefaultValue
+ if not Value:
+ ToPcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ ToPcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ ToPcd.MaxDatumSize = str(len(Value) - 1)
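+ # Illustrative examples of the size heuristic above:
+ #     'L"abc"'     -> MaxDatumSize "8"  ((6 - 2) * 2: three UCS-2
+ #                     characters plus the terminating wide NUL)
+ #     '{0x1,0x2}'  -> MaxDatumSize "2"  (number of bytes listed)
+ #     '"abc"'      -> MaxDatumSize "4"  (len - 1: three characters
+ #                     plus the terminating NUL)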
+
+ # Apply the default SKU for dynamic PCDs if the specified one is not available
+ if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
+ and not ToPcd.SkuInfoList:
+ if self.Platform.SkuName in self.Platform.SkuIds:
+ SkuName = self.Platform.SkuName
+ else:
+ SkuName = TAB_DEFAULT
+ ToPcd.SkuInfoList = {
+ SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
+ }
+
+ def ApplyPcdSetting(self, Ma, Pcds, Library=""):
+ # for each PCD in module
+ Module=Ma.Module
+ for Name, Guid in Pcds:
+ PcdInModule = Pcds[Name, Guid]
+ # find out the PCD setting in platform
+ if (Name, Guid) in self.Pcds:
+ PcdInPlatform = self.Pcds[Name, Guid]
+ else:
+ PcdInPlatform = None
+ # then override the settings if any
+ self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
+ # resolve the VariableGuid value
+ for SkuId in PcdInModule.SkuInfoList:
+ Sku = PcdInModule.SkuInfoList[SkuId]
+ if Sku.VariableGuid == '': continue
+ Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
+ if Sku.VariableGuidValue is None:
+ PackageList = "\n\t".join(str(P) for P in self.PackageList)
+ EdkLogger.error(
+ 'build',
+ RESOURCE_NOT_AVAILABLE,
+ "Value of GUID [%s] is not found in" % Sku.VariableGuid,
+ ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
+ % (Guid, Name, str(Module)),
+ File=self.MetaFile
+ )
+
+ # override PCD settings with module specific setting
+ ModuleScopePcds = self.DataPipe.Get("MOL_PCDS")
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PCD_DATA = ModuleScopePcds.get(Ma.Guid,{})
+ mPcds = {(pcd.TokenCName,pcd.TokenSpaceGuidCName): pcd for pcd in PCD_DATA}
+ for Key in mPcds:
+ if self.BuildOptionPcd:
+ for pcd in self.BuildOptionPcd:
+ (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
+ if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
+ PlatformModule.Pcds[Key].DefaultValue = pcdvalue
+ PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
+ break
+ Flag = False
+ if Key in Pcds:
+ ToPcd = Pcds[Key]
+ Flag = True
+ elif Key in self.MixedPcd:
+ for PcdItem in self.MixedPcd[Key]:
+ if PcdItem in Pcds:
+ ToPcd = Pcds[PcdItem]
+ Flag = True
+ break
+ if Flag:
+ self._OverridePcd(ToPcd, mPcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
+ # use PCD value to calculate the MaxDatumSize when it is not specified
+ for Name, Guid in Pcds:
+ Pcd = Pcds[Name, Guid]
+ if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
+ Pcd.MaxSizeUserSet = None
+ Value = Pcd.DefaultValue
+ if not Value:
+ Pcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ Pcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ Pcd.MaxDatumSize = str(len(Value) - 1)
+ return list(Pcds.values())
+
+ @cached_property
+ def Pcds(self):
+ PlatformPcdData = self.DataPipe.Get("PLA_PCD")
+ return {(pcddata.TokenCName,pcddata.TokenSpaceGuidCName):pcddata for pcddata in PlatformPcdData}
+
+ def CreateSkuInfoFromDict(self,SkuInfoDict):
+ return SkuInfoClass(
+ SkuInfoDict.get("SkuIdName"),
+ SkuInfoDict.get("SkuId"),
+ SkuInfoDict.get("VariableName"),
+ SkuInfoDict.get("VariableGuid"),
+ SkuInfoDict.get("VariableOffset"),
+ SkuInfoDict.get("HiiDefaultValue"),
+ SkuInfoDict.get("VpdOffset"),
+ SkuInfoDict.get("DefaultValue"),
+ SkuInfoDict.get("VariableGuidValue"),
+ SkuInfoDict.get("VariableAttribute",""),
+ SkuInfoDict.get("DefaultStore",None)
+ )
+ @cached_property
+ def MixedPcd(self):
+ return self.DataPipe.Get("MixedPcd")
+ @cached_property
+ def _GuidDict(self):
+ RetVal = self.DataPipe.Get("GuidDict")
+ if RetVal is None:
+ RetVal = {}
+ return RetVal
+ @cached_property
+ def BuildOptionPcd(self):
+ return self.DataPipe.Get("BuildOptPcd")
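+ # Build options are merged in increasing precedence order: tool
+ # definition, module INF options, platform DSC options, module-type
+ # options, then per-module DSC options (a summary of the loops below).
+ # Non-PATH attributes accumulate across sources, PATH is replaced, and
+ # a value starting with '=' overrides everything accumulated so far.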
+ def ApplyBuildOption(self,module):
+ PlatformOptions = self.DataPipe.Get("PLA_BO")
+ ModuleBuildOptions = self.DataPipe.Get("MOL_BO")
+ ModuleOptionFromDsc = ModuleBuildOptions.get((module.MetaFile.File,module.MetaFile.Root))
+ if ModuleOptionFromDsc:
+ ModuleTypeOptions, PlatformModuleOptions = ModuleOptionFromDsc["ModuleTypeOptions"],ModuleOptionFromDsc["PlatformModuleOptions"]
+ else:
+ ModuleTypeOptions, PlatformModuleOptions = {}, {}
+ ToolDefinition = self.DataPipe.Get("TOOLDEF")
+ ModuleOptions = self._ExpandBuildOption(module.BuildOptions)
+ BuildRuleOrder = None
+ for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ for Tool in Options:
+ for Attr in Options[Tool]:
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ BuildRuleOrder = Options[Tool][Attr]
+
+ AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
+ list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
+ list(ToolDefinition.keys()))
+ BuildOptions = defaultdict(lambda: defaultdict(str))
+ for Tool in AllTools:
+ for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ if Tool not in Options:
+ continue
+ for Attr in Options[Tool]:
+ #
+ # Do not generate it in Makefile
+ #
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ continue
+ Value = Options[Tool][Attr]
+ ToolList = [Tool]
+ if Tool == TAB_STAR:
+ ToolList = list(AllTools)
+ ToolList.remove(TAB_STAR)
+ for ExpandedTool in ToolList:
+ # check if override is indicated
+ if Value.startswith('='):
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value[1:])
+ else:
+ if Attr != 'PATH':
+ BuildOptions[ExpandedTool][Attr] += " " + mws.handleWsMacro(Value)
+ else:
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value)
+
+ return BuildOptions, BuildRuleOrder
+
+ def ApplyLibraryInstance(self,module):
+ alldeps = self.DataPipe.Get("DEPS")
+ if alldeps is None:
+ alldeps = {}
+ mod_libs = alldeps.get((module.MetaFile.File,module.MetaFile.Root,module.Arch,module.MetaFile.Path),[])
+ retVal = []
+ for (file_path,root,arch,abs_path) in mod_libs:
+ libMetaFile = PathClass(file_path,root)
+ libMetaFile.OriginalPath = PathClass(file_path,root)
+ libMetaFile.Path = abs_path
+ retVal.append(self.Wa.BuildDatabase[libMetaFile, arch, self.Target,self.ToolChain])
+ return retVal
+
+ ## Parse build_rule.txt in Conf Directory.
+ #
+ # @retval BuildRule object
+ #
+ @cached_property
+ def BuildRule(self):
+ WInfo = self.DataPipe.Get("P_Info")
+ RetVal = WInfo.get("BuildRuleFile")
+ if RetVal._FileVersion == "":
+ RetVal._FileVersion = AutoGenReqBuildRuleVerNum
+ return RetVal
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
new file mode 100755
index 00000000..29bc5439
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
@@ -0,0 +1,1603 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import os.path as path
+import copy
+from collections import defaultdict
+
+from .BuildEngine import BuildRule,gDefaultBuildRuleFile,AutoGenReqBuildRuleVerNum
+from .GenVar import VariableMgr, var_info
+from . import GenMake
+from AutoGen.DataPipe import MemoryDataPipe
+from AutoGen.ModuleAutoGen import ModuleAutoGen
+from AutoGen.AutoGen import AutoGen
+from AutoGen.AutoGen import CalculatePriorityValue
+from Workspace.WorkspaceCommon import GetModuleLibInstances
+from CommonDataClass.CommonClass import SkuInfoClass
+from Common.caching import cached_class_function
+from Common.Expression import ValueExpressionEx
+from Common.StringUtils import StringToArray,NormPath
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+import Common.VpdInfoFile as VpdInfoFile
+
+## Split a command line option string into a list
+#
+# subprocess.Popen needs the args to be a sequence; otherwise there are
+# problems launching the command on non-Windows platforms.
+#
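+# Illustrative example (hypothetical option string):
+#     _SplitOption("/nologo /c -DFOO='a b'")
+# returns
+#     ['/nologo', '/c', "-DFOO='a b'"]
+# Quoted spans are kept intact, and each '/' or '-' that follows
+# whitespace starts a new option.
+#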
+def _SplitOption(OptionString):
+ OptionList = []
+ LastChar = " "
+ OptionStart = 0
+ QuotationMark = ""
+ for Index in range(0, len(OptionString)):
+ CurrentChar = OptionString[Index]
+ if CurrentChar in ['"', "'"]:
+ if QuotationMark == CurrentChar:
+ QuotationMark = ""
+ elif QuotationMark == "":
+ QuotationMark = CurrentChar
+ continue
+ elif QuotationMark:
+ continue
+
+ if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
+ if Index > OptionStart:
+ OptionList.append(OptionString[OptionStart:Index - 1])
+ OptionStart = Index
+ LastChar = CurrentChar
+ OptionList.append(OptionString[OptionStart:])
+ return OptionList
+
+## AutoGen class for platform
+#
+# The PlatformAutoGen class processes the original information in the
+# platform file in order to generate the makefile for the platform.
+#
+class PlatformAutoGen(AutoGen):
+ # call super().__init__ then call the worker function with different parameter count
+ def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ if not hasattr(self, "_Init"):
+ self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
+ self._Init = True
+ #
+ # Used to store all PCDs for both PEI and DXE phase, in order to generate
+ # correct PCD database
+ #
+ _DynaPcdList_ = []
+ _NonDynaPcdList_ = []
+ _PlatformPcds = {}
+
+
+
+ ## Initialize PlatformAutoGen
+ #
+ #
+ # @param Workspace WorkspaceAutoGen object
+ # @param PlatformFile Platform file (DSC file)
+ # @param Target Build target (DEBUG, RELEASE)
+ # @param Toolchain Name of tool chain
+ # @param Arch Arch that the platform supports
+ #
+ def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
+
+ self.MetaFile = PlatformFile
+ self.Workspace = Workspace
+ self.WorkspaceDir = Workspace.WorkspaceDir
+ self.ToolChain = Toolchain
+ self.BuildTarget = Target
+ self.Arch = Arch
+ self.SourceDir = PlatformFile.SubDir
+ self.FdTargetList = self.Workspace.FdTargetList
+ self.FvTargetList = self.Workspace.FvTargetList
+ # get the original module/package/platform objects
+ self.BuildDatabase = Workspace.BuildDatabase
+ self.DscBuildDataObj = Workspace.Platform
+
+ # MakeFileName is used to get the Makefile name and as a flag
+ # indicating whether the file has been created.
+ self.MakeFileName = ""
+
+ self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+ self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+
+ self._AsBuildInfList = []
+ self._AsBuildModuleList = []
+
+ self.VariableInfo = None
+
+ if GlobalData.gFdfParser is not None:
+ self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
+ for Inf in self._AsBuildInfList:
+ InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
+ M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
+ if not M.IsBinaryModule:
+ continue
+ self._AsBuildModuleList.append(InfClass)
+ # get library/modules for build
+ self.LibraryBuildDirectoryList = []
+ self.ModuleBuildDirectoryList = []
+
+ self.DataPipe = MemoryDataPipe(self.BuildDir)
+ self.DataPipe.FillData(self)
+
+ return True
+ def FillData_LibConstPcd(self):
+ libConstPcd = {}
+ for LibAuto in self.LibraryAutoGenList:
+ if LibAuto.ConstPcd:
+ libConstPcd[(LibAuto.MetaFile.File,LibAuto.MetaFile.Root,LibAuto.Arch,LibAuto.MetaFile.Path)] = LibAuto.ConstPcd
+ self.DataPipe.DataContainer = {"LibConstPcd":libConstPcd}
+ ## hash() operator of PlatformAutoGen
+ #
+ # The platform file path and arch string will be used to represent
+ # hash value of this object
+ #
+ # @retval int Hash value of the platform file path and arch
+ #
+ @cached_class_function
+ def __hash__(self):
+ return hash((self.MetaFile, self.Arch,self.ToolChain,self.BuildTarget))
+ @cached_class_function
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, self.Arch)
+
+ ## Create autogen code for platform and modules
+ #
+ # Since there's no autogen code for platform, this method will do nothing
+ # if CreateModuleCodeFile is set to False.
+ #
+ # @param CreateModuleCodeFile Flag indicating if creating module's
+ # autogen code file or not
+ #
+ @cached_class_function
+ def CreateCodeFile(self, CreateModuleCodeFile=False):
+ # only module has code to be created, so do nothing if CreateModuleCodeFile is False
+ if not CreateModuleCodeFile:
+ return
+
+ for Ma in self.ModuleAutoGenList:
+ Ma.CreateCodeFile(CreateModuleCodeFile)
+
+ ## Generate Fds Command
+ @cached_property
+ def GenFdsCommand(self):
+ return self.Workspace.GenFdsCommand
+
+ ## Create makefile for the platform and modules in it
+ #
+ # @param CreateModuleMakeFile Flag indicating if the makefile for
+ # modules will be created as well
+ #
+ def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):
+ if CreateModuleMakeFile:
+ for Ma in self._MaList:
+ key = (Ma.MetaFile.File, self.Arch)
+ if key in FfsCommand:
+ Ma.CreateMakeFile(CreateModuleMakeFile, FfsCommand[key])
+ else:
+ Ma.CreateMakeFile(CreateModuleMakeFile)
+ self.CreateLibModuelDirs()
+
+ def CreateLibModuelDirs(self):
+ # No need to create makefile for the platform more than once.
+ if self.MakeFileName:
+ return
+
+ # create library/module build dirs for platform
+ Makefile = GenMake.PlatformMakefile(self)
+ self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()
+ self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()
+ self.MakeFileName = Makefile.getMakefileName()
+
+ @property
+ def AllPcdList(self):
+ return self.DynamicPcdList + self.NonDynamicPcdList
+ ## Deal with Shared FixedAtBuild Pcds
+ #
+ def CollectFixedAtBuildPcds(self):
+ for LibAuto in self.LibraryAutoGenList:
+ FixedAtBuildPcds = {}
+ ShareFixedAtBuildPcdsSameValue = {}
+ for Module in LibAuto.ReferenceModules:
+ for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
+ DefaultValue = Pcd.DefaultValue
+ # Cover the case where a DSC component overrides the PCD value and the PCD is only used in one library
+ if Pcd in Module.LibraryPcdList:
+ Index = Module.LibraryPcdList.index(Pcd)
+ DefaultValue = Module.LibraryPcdList[Index].DefaultValue
+ key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if key not in FixedAtBuildPcds:
+ ShareFixedAtBuildPcdsSameValue[key] = True
+ FixedAtBuildPcds[key] = DefaultValue
+ else:
+ if FixedAtBuildPcds[key] != DefaultValue:
+ ShareFixedAtBuildPcdsSameValue[key] = False
+ for Pcd in LibAuto.FixedAtBuildPcds:
+ key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
+ continue
+ else:
+ DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
+ if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
+ continue
+ if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
+ LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
+
+ def CollectVariables(self, DynamicPcdSet):
+ VpdRegionSize = 0
+ VpdRegionBase = 0
+ if self.Workspace.FdfFile:
+ FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
+ for FdRegion in FdDict.RegionList:
+ for item in FdRegion.RegionDataList:
+ if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
+ VpdRegionSize = FdRegion.Size
+ VpdRegionBase = FdRegion.Offset
+ break
+
+ VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
+ VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
+ VariableInfo.SetVpdRegionOffset(VpdRegionBase)
+ Index = 0
+ for Pcd in sorted(DynamicPcdSet):
+ pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ for SkuName in Pcd.SkuInfoList:
+ Sku = Pcd.SkuInfoList[SkuName]
+ SkuId = Sku.SkuId
+ if SkuId is None or SkuId == '':
+ continue
+ if len(Sku.VariableName) > 0:
+ if Sku.VariableAttribute and 'NV' not in Sku.VariableAttribute:
+ continue
+ VariableGuidStructure = Sku.VariableGuidValue
+ VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
+ for StorageName in Sku.DefaultStoreDict:
+ VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName] if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES else StringToArray(Sku.DefaultStoreDict[StorageName]), Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))
+ Index += 1
+ return VariableInfo
+
+ def UpdateNVStoreMaxSize(self, OrgVpdFile):
+ if self.VariableInfo:
+ VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
+ PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
+
+ if PcdNvStoreDfBuffer:
+ try:
+ OrgVpdFile.Read(VpdMapFilePath)
+ PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
+ NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
+ except:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+ NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
+ default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
+ maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))
+ var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)
+
+ if var_data and default_skuobj:
+ default_skuobj.DefaultValue = var_data
+ PcdNvStoreDfBuffer[0].DefaultValue = var_data
+ PcdNvStoreDfBuffer[0].SkuInfoList.clear()
+ PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj
+ PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))
+
+ return OrgVpdFile
+
+ ## Collect dynamic PCDs
+ #
+ # Gather dynamic PCDs list from each module and their settings from platform
+ # This interface should be invoked explicitly when platform action is created.
+ #
+ def CollectPlatformDynamicPcds(self):
+ self.CategoryPcds()
+ self.SortDynamicPcd()
+
+ def CategoryPcds(self):
+ # Category Pcds into DynamicPcds and NonDynamicPcds
+ # for gathering error information
+ NoDatumTypePcdList = set()
+ FdfModuleList = []
+ for InfName in self._AsBuildInfList:
+ InfName = mws.join(self.WorkspaceDir, InfName)
+ FdfModuleList.append(os.path.normpath(InfName))
+ for M in self._MbList:
+# M is the module build data object for each module in the platform
+ ModPcdList = self.ApplyPcdSetting(M, M.ModulePcdList)
+ LibPcdList = []
+ for lib in M.LibraryPcdList:
+ LibPcdList.extend(self.ApplyPcdSetting(M, M.LibraryPcdList[lib], lib))
+ for PcdFromModule in ModPcdList + LibPcdList:
+
+ # make sure that the "VOID*" kind of datum has MaxDatumSize set
+ if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
+ NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
+
+ # Check the PCD from Binary INF or Source INF
+ if M.IsBinaryModule == True:
+ PcdFromModule.IsFromBinaryInf = True
+
+ # Check the PCD from DSC or not
+ PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds
+
+ if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if M.MetaFile.Path not in FdfModuleList:
+ # If one of the source-built modules listed in the DSC is not listed
+ # in the FDF modules, and the INF lists a PCD that can only use the
+ # PcdsDynamic access method (it is only listed in the DEC file that
+ # declares the PCD as PcdsDynamic), then the build tool reports a
+ # warning to notify the platform integrator that they are attempting
+ # to build a module that must be included in a flash image in order
+ # to be functional. Such dynamic PCDs will not be added to the
+ # database unless they are used by other modules included in the FDF.
+ if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \
+ PcdFromModule.IsFromBinaryInf == False:
+                            # Print a warning message so the developer can make a decision.
+ continue
+                        # If one of the source-built modules listed in the DSC is not listed in
+                        # the FDF modules, and the INF lists a PCD that can only use the
+                        # PcdsDynamicEx access method (it is only declared as PcdsDynamicEx in
+                        # the DEC file), then DO NOT break the build; DO NOT add the
+                        # PCD to the platform's PCD database.
+ if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ continue
+ #
+                    # If a dynamic PCD is used by both a PEIM/PEI module and a DXE module,
+                    # it should be stored in the PEI PCD database; if a dynamic PCD is only
+                    # used by DXE modules, it should be stored in the DXE PCD database.
+                    # The default Phase is DXE.
+ #
+ if M.ModuleType in SUP_MODULE_SET_PEI:
+ PcdFromModule.Phase = "PEI"
+ if PcdFromModule not in self._DynaPcdList_:
+ self._DynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule.Phase == 'PEI':
+                    # overwrite an existing identical PCD if its Phase is PEI
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_[Index] = PcdFromModule
+ elif PcdFromModule not in self._NonDynaPcdList_:
+ self._NonDynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:
+ Index = self._NonDynaPcdList_.index(PcdFromModule)
+ if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:
+ #The PCD from Binary INF will override the same one from source INF
+ self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])
+ PcdFromModule.Pending = False
+ self._NonDynaPcdList_.append (PcdFromModule)
+ DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}
+        # add PCDs from modules that are listed in the FDF but not in the DSC to the database
+ for InfName in FdfModuleList:
+ if InfName not in DscModuleSet:
+ InfClass = PathClass(InfName)
+ M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
+            # If a module INF is in the FDF but not in the current arch's DSC module list, it must be a module
+            # (binary or source) for a different arch. PCDs in source modules for other arches were already added above,
+            # so skip source modules here. For a binary module in the current arch, its PCDs must be added to the database.
+ if not M.IsBinaryModule:
+ continue
+ # Override the module PCD setting by platform setting
+ ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)
+ for PcdFromModule in ModulePcdList:
+ PcdFromModule.IsFromBinaryInf = True
+ PcdFromModule.IsFromDsc = False
+                # Only DynamicEx and Patchable PCDs are allowed in an AsBuild INF
+ if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"
+ % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))
+ # make sure that the "VOID*" kind of datum has MaxDatumSize set
+ if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
+ NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))
+ if M.ModuleType in SUP_MODULE_SET_PEI:
+ PcdFromModule.Phase = "PEI"
+ if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ self._DynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:
+ self._NonDynaPcdList_.append(PcdFromModule)
+ if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+                    # Overwrite the phase of an existing identical PCD if its Phase is PEI.
+                    # This handles the case where a dynamic PCD is used by a PEIM/PEI
+                    # module and a DXE module at the same time.
+                    # Also overwrite the type of the PCD in the source INF with the type
+                    # from the AsBuild INF, i.e. DynamicEx.
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
+ self._DynaPcdList_[Index].Type = PcdFromModule.Type
+ for PcdFromModule in self._NonDynaPcdList_:
+            # If a PCD is not listed in the DSC file, but all binary INF files
+            # used by this platform that use the PCD list it in a [PatchPcds]
+            # section, AND all source INF files used by this platform that use
+            # the PCD list it in either a [Pcds] or [PatchPcds] section, then
+            # the tools must NOT add the PCD to the platform's PCD database;
+            # the build must assign the access method for this PCD as
+            # PcdsPatchableInModule.
+ if PcdFromModule not in self._DynaPcdList_:
+ continue
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ if PcdFromModule.IsFromDsc == False and \
+ PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \
+ PcdFromModule.IsFromBinaryInf == True and \
+ self._DynaPcdList_[Index].IsFromBinaryInf == False:
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_.remove (self._DynaPcdList_[Index])
+
+        # print error information and break the build if any error was found
+ if len(NoDatumTypePcdList) > 0:
+ NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"
+ % NoDatumTypePcdListString)
+ self._NonDynamicPcdList = sorted(self._NonDynaPcdList_)
+ self._DynamicPcdList = self._DynaPcdList_
+
+ def SortDynamicPcd(self):
+ #
+        # Sort the dynamic PCD list so that:
+        # 1) PCDs whose datum type is VOID* and whose value is a Unicode string
+        #    (starting with L) are placed at the head of the list;
+        # 2) HII type PCDs are placed after the Unicode-string PCDs.
+        #
+        # The reason for sorting is to make sure Unicode strings are double-byte aligned in the string table.
+ #
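+        # A minimal sketch of the resulting order, assuming three hypothetical
+        # dynamic PCDs: A (VOID*, default value L"Setup"), B (HII type, with
+        # VariableName set) and C (UINT32): the final list becomes [A, B, C],
+        # i.e. Unicode-string PCDs first, then HII PCDs, then everything else.
+        #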
+ UnicodePcdArray = set()
+ HiiPcdArray = set()
+ OtherPcdArray = set()
+ VpdPcdDict = {}
+ VpdFile = VpdInfoFile.VpdInfoFile()
+ NeedProcessVpdMapFile = False
+
+ for pcd in self.Platform.Pcds:
+ if pcd not in self._PlatformPcds:
+ self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]
+
+ for item in self._PlatformPcds:
+ if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ self._PlatformPcds[item].DatumType = TAB_VOID
+
+ if (self.Workspace.ArchList[-1] == self.Arch):
+ for Pcd in self._DynamicPcdList:
+                # just pick a value to determine whether it is a Unicode string type
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ Pcd.DatumType = TAB_VOID
+
+                # if a PCD's datum value is a Unicode string, insert it to the left of UnicodeIndex;
+                # if a PCD is HII type, insert it to the right of UnicodeIndex
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
+
+            # Collect DynamicHii PCD values and assign them to the DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer
+ PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))
+ if PcdNvStoreDfBuffer:
+ self.VariableInfo = self.CollectVariables(self._DynamicPcdList)
+ vardump = self.VariableInfo.dump()
+ if vardump:
+ #
+                    # According to PCD_DATABASE_INIT in edk2\MdeModulePkg\Include\Guid\PcdDataBaseSignatureGuid.h,
+                    # the max size of a string PCD must not exceed USHRT_MAX 65535 (0xffff):
+ #typedef UINT16 SIZE_INFO;
+ #//SIZE_INFO SizeTable[];
+ if len(vardump.split(",")) > 0xffff:
+ EdkLogger.error("build", RESOURCE_OVERFLOW, 'The current length of PCD %s value is %d, it exceeds to the max size of String PCD.' %(".".join([PcdNvStoreDfBuffer.TokenSpaceGuidCName,PcdNvStoreDfBuffer.TokenCName]) ,len(vardump.split(","))))
+ PcdNvStoreDfBuffer.DefaultValue = vardump
+ for skuname in PcdNvStoreDfBuffer.SkuInfoList:
+ PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump
+ PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))
+ else:
+                    # If the end user defines [DefaultStores] and [XXX.Manufacturing] in the DSC but forgets to configure PcdNvStoreDefaultValueBuffer as PcdsDynamicExVpd
+ if [Pcd for Pcd in self._DynamicPcdList if Pcd.UserDefinedDefaultStoresFlag]:
+ EdkLogger.warn("build", "PcdNvStoreDefaultValueBuffer should be defined as PcdsDynamicExVpd in dsc file since the DefaultStores is enabled for this platform.\n%s" %self.Platform.MetaFile.Path)
+ PlatformPcds = sorted(self._PlatformPcds.keys())
+ #
+ # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.
+ #
+ VpdSkuMap = {}
+ for PcdKey in PlatformPcds:
+ Pcd = self._PlatformPcds[PcdKey]
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \
+ PcdKey in VpdPcdDict:
+ Pcd = VpdPcdDict[PcdKey]
+ SkuValueMap = {}
+ DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)
+ if DefaultSku:
+ PcdValue = DefaultSku.DefaultValue
+ if PcdValue not in SkuValueMap:
+ SkuValueMap[PcdValue] = []
+ VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)
+ SkuValueMap[PcdValue].append(DefaultSku)
+
+ for (SkuName, Sku) in Pcd.SkuInfoList.items():
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ PcdValue = Sku.DefaultValue
+ if PcdValue == "":
+ PcdValue = Pcd.DefaultValue
+ if Sku.VpdOffset != TAB_STAR:
+ if PcdValue.startswith("{"):
+ Alignment = 8
+ elif PcdValue.startswith("L"):
+ Alignment = 2
+ else:
+ Alignment = 1
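+                            # For illustration: a byte-array value "{...}" must land on an
+                            # 8-byte aligned VPD offset, a Unicode string L"..." on a 2-byte
+                            # aligned one, and any other (ASCII) value has no alignment requirement.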
+ try:
+ VpdOffset = int(Sku.VpdOffset)
+ except:
+ try:
+ VpdOffset = int(Sku.VpdOffset, 16)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if VpdOffset % Alignment != 0:
+ if PcdValue.startswith("{"):
+ EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)
+ else:
+ EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))
+ if PcdValue not in SkuValueMap:
+ SkuValueMap[PcdValue] = []
+ VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)
+ SkuValueMap[PcdValue].append(Sku)
+                    # if the offset of a VPD PCD is *, it needs to be fixed up by the third-party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
+ NeedProcessVpdMapFile = True
+ if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
+ EdkLogger.error("Build", FILE_NOT_FOUND, \
+ "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ VpdSkuMap[PcdKey] = SkuValueMap
+ #
+            # Fix up PCDs defined in the VPD PCD section that are never referenced by any module.
+            # An example is a PCD used only for signature purposes.
+ #
+ for DscPcd in PlatformPcds:
+ DscPcdEntry = self._PlatformPcds[DscPcd]
+ if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
+ FoundFlag = False
+ for VpdPcd in VpdFile._VpdArray:
+                        # This PCD has been referenced by a module
+ if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
+ FoundFlag = True
+
+                    # Not found; it should be a signature PCD
+ if not FoundFlag :
+                        # just pick a value to determine whether it is a Unicode string type
+ SkuValueMap = {}
+ SkuObjList = list(DscPcdEntry.SkuInfoList.items())
+ DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
+ if DefaultSku:
+ defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
+ SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
+ for (SkuName, Sku) in SkuObjList:
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+                            # Iterate over the DEC PCD information to get the value and datum type
+ for eachDec in self.PackageList:
+ for DecPcd in eachDec.Pcds:
+ DecPcdEntry = eachDec.Pcds[DecPcd]
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+                                    # Print a warning message so the developer can make a decision.
+ EdkLogger.warn("build", "Unreferenced vpd pcd used!",
+ File=self.MetaFile, \
+ ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
+ %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
+
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
+ DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
+ DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
+                                    # Only fix the value when no value is provided in the DSC file.
+ if not Sku.DefaultValue:
+ DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
+
+ if DscPcdEntry not in self._DynamicPcdList:
+ self._DynamicPcdList.append(DscPcdEntry)
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ PcdValue = Sku.DefaultValue
+ if PcdValue == "":
+ PcdValue = DscPcdEntry.DefaultValue
+ if Sku.VpdOffset != TAB_STAR:
+ if PcdValue.startswith("{"):
+ Alignment = 8
+ elif PcdValue.startswith("L"):
+ Alignment = 2
+ else:
+ Alignment = 1
+ try:
+ VpdOffset = int(Sku.VpdOffset)
+ except:
+ try:
+ VpdOffset = int(Sku.VpdOffset, 16)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))
+ if VpdOffset % Alignment != 0:
+ if PcdValue.startswith("{"):
+ EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)
+ else:
+ EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))
+ if PcdValue not in SkuValueMap:
+ SkuValueMap[PcdValue] = []
+ VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
+ SkuValueMap[PcdValue].append(Sku)
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
+ NeedProcessVpdMapFile = True
+ if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):
+ UnicodePcdArray.add(DscPcdEntry)
+ elif len(Sku.VariableName) > 0:
+ HiiPcdArray.add(DscPcdEntry)
+ else:
+ OtherPcdArray.add(DscPcdEntry)
+
+                    # if the offset of a VPD PCD is *, it needs to be fixed up by the third-party tool.
+ VpdSkuMap[DscPcd] = SkuValueMap
+ if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
+ VpdFile.GetCount() != 0:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
+
+ if VpdFile.GetCount() != 0:
+
+ self.FixVpdOffset(VpdFile)
+
+ self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))
+ PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
+ if PcdNvStoreDfBuffer:
+ PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName
+ if (PcdName,PcdGuid) in VpdSkuMap:
+ DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
+ VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}
+
+ # Process VPD map file generated by third party BPDG tool
+ if NeedProcessVpdMapFile:
+ VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
+ try:
+ VpdFile.Read(VpdMapFilePath)
+
+ # Fixup TAB_STAR offset
+ for pcd in VpdSkuMap:
+ vpdinfo = VpdFile.GetVpdInfo(pcd)
+ if vpdinfo is None:
+                        # this PCD has no entry in the VPD map file; skip it
+ continue
+ for pcdvalue in VpdSkuMap[pcd]:
+ for sku in VpdSkuMap[pcd][pcdvalue]:
+ for item in vpdinfo:
+ if item[2] == pcdvalue:
+ sku.VpdOffset = item[1]
+ except:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+            # Delete and rebuild the DynamicPcdList the last time this function is entered (last arch)
+ for Pcd in self._DynamicPcdList:
+                # just pick a value to determine whether it is a Unicode string type
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ Pcd.DatumType = TAB_VOID
+
+ PcdValue = Sku.DefaultValue
+ if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):
+                    # if a PCD's datum value is a Unicode string, insert it to the left of UnicodeIndex
+ UnicodePcdArray.add(Pcd)
+ elif len(Sku.VariableName) > 0:
+                    # if a PCD is HII type, insert it to the right of UnicodeIndex
+ HiiPcdArray.add(Pcd)
+ else:
+ OtherPcdArray.add(Pcd)
+ del self._DynamicPcdList[:]
+ self._DynamicPcdList.extend(list(UnicodePcdArray))
+ self._DynamicPcdList.extend(list(HiiPcdArray))
+ self._DynamicPcdList.extend(list(OtherPcdArray))
+ self._DynamicPcdList.sort()
+ allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
+ for pcd in self._DynamicPcdList:
+ if len(pcd.SkuInfoList) == 1:
+ for (SkuName, SkuId) in allskuset:
+ if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
+ continue
+ pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
+ pcd.SkuInfoList[SkuName].SkuId = SkuId
+ pcd.SkuInfoList[SkuName].SkuIdName = SkuName
+
+ def FixVpdOffset(self, VpdFile ):
+ FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
+ if not os.path.exists(FvPath):
+ try:
+ os.makedirs(FvPath)
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
+
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+
+ if VpdFile.Write(VpdFilePath):
+            # retrieve the BPDG tool's path from tools_def.txt according to the VPD_TOOL_GUID defined in the DSC file.
+ BPDGToolName = None
+ for ToolDef in self.ToolDefinition.values():
+ if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:
+ if "PATH" not in ToolDef:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
+ BPDGToolName = ToolDef["PATH"]
+ break
+ # Call third party GUID BPDG tool.
+ if BPDGToolName is not None:
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
+ else:
+ EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ ## Return the platform build data object
+ @cached_property
+ def Platform(self):
+ return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+
+ ## Return platform name
+ @cached_property
+ def Name(self):
+ return self.Platform.PlatformName
+
+ ## Return the meta file GUID
+ @cached_property
+ def Guid(self):
+ return self.Platform.Guid
+
+ ## Return the platform version
+ @cached_property
+ def Version(self):
+ return self.Platform.Version
+
+ ## Return the FDF file name
+ @cached_property
+ def FdfFile(self):
+ if self.Workspace.FdfFile:
+ RetVal= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
+ else:
+ RetVal = ''
+ return RetVal
+
+    ## Return the build output directory the platform specifies
+ @cached_property
+ def OutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return the directory to store all intermediate and final files built
+ @cached_property
+ def BuildDir(self):
+ if os.path.isabs(self.OutputDir):
+ GlobalData.gBuildDirectory = RetVal = path.join(
+ path.abspath(self.OutputDir),
+ self.BuildTarget + "_" + self.ToolChain,
+ )
+ else:
+ GlobalData.gBuildDirectory = RetVal = path.join(
+ self.WorkspaceDir,
+ self.OutputDir,
+ self.BuildTarget + "_" + self.ToolChain,
+ )
+ return RetVal
+
+ ## Return directory of platform makefile
+ #
+ # @retval string Makefile directory
+ #
+ @cached_property
+ def MakeFileDir(self):
+ return path.join(self.BuildDir, self.Arch)
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ if "MAKE" in self.EdkIIBuildOption and "PATH" in self.EdkIIBuildOption["MAKE"]:
+            # MAKE_PATH in the DSC [BuildOptions] section has higher priority
+ Path = self.EdkIIBuildOption["MAKE"]["PATH"]
+ if Path.startswith('='):
+ Path = Path[1:].strip()
+ RetVal = _SplitOption(Path)
+ elif "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
+ RetVal = _SplitOption(self.ToolDefinition["MAKE"]["PATH"])
+ else:
+ return []
+ if "MAKE" in self.ToolDefinition and "FLAGS" in self.ToolDefinition["MAKE"]:
+ NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
+ if NewOption != '':
+ RetVal += _SplitOption(NewOption)
+ if "MAKE" in self.EdkIIBuildOption and "FLAGS" in self.EdkIIBuildOption["MAKE"]:
+ Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]
+ if Flags.startswith('='):
+ RetVal = [RetVal[0]] + _SplitOption(Flags[1:].strip())
+ else:
+ RetVal = RetVal + _SplitOption(Flags.strip())
+ return RetVal
+
+    ## Compute a tool definition key priority value in the range 0..15
+ #
+ # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE 15
+ # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE 14
+ # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE 13
+ # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE 12
+ # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE 11
+ # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE 10
+ # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE 9
+ # ******_*********_****_COMMANDTYPE_ATTRIBUTE 8
+ # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE 7
+ # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE 6
+ # TARGET_*********_ARCH_***********_ATTRIBUTE 5
+ # ******_*********_ARCH_***********_ATTRIBUTE 4
+ # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE 3
+ # ******_TOOLCHAIN_****_***********_ATTRIBUTE 2
+ # TARGET_*********_****_***********_ATTRIBUTE 1
+ # ******_*********_****_***********_ATTRIBUTE 0
+ #
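+    # A worked example of the scoring (hypothetical keys): each non-'*'
+    # field among the first four adds its bit, so
+    #   ToolDefinitionPriority("DEBUG_GCC5_X64_CC_FLAGS")  # 1+2+4+8 = 15
+    #   ToolDefinitionPriority("*_GCC5_X64_CC_FLAGS")      # 2+4+8   = 14
+    #   ToolDefinitionPriority("*_*_*_CC_FLAGS")           # 8
+    #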
+ def ToolDefinitionPriority (self,Key):
+ KeyList = Key.split('_')
+ Priority = 0
+ for Index in range (0, min(4, len(KeyList))):
+ if KeyList[Index] != '*':
+ Priority += (1 << Index)
+ return Priority
+
+ ## Get tool chain definition
+ #
+ # Get each tool definition for given tool chain from tools_def.txt and platform
+ #
+ @cached_property
+ def ToolDefinition(self):
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
+ if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
+ ExtraData="[%s]" % self.MetaFile)
+ RetVal = OrderedDict()
+ DllPathList = set()
+
+ PrioritizedDefList = sorted(ToolDefinition.keys(), key=self.ToolDefinitionPriority, reverse=True)
+ for Def in PrioritizedDefList:
+ Target, Tag, Arch, Tool, Attr = Def.split("_")
+ if Target == TAB_STAR:
+ Target = self.BuildTarget
+ if Tag == TAB_STAR:
+ Tag = self.ToolChain
+ if Arch == TAB_STAR:
+ Arch = self.Arch
+ if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
+ continue
+
+ Value = ToolDefinition[Def]
+ # don't record the DLL
+ if Attr == "DLL":
+ DllPathList.add(Value)
+ continue
+
+ #
+ # ToolDefinition is sorted from highest priority to lowest priority.
+            # Only add the first (highest priority) match to RetVal
+ #
+ if Tool not in RetVal:
+ RetVal[Tool] = OrderedDict()
+ if Attr not in RetVal[Tool]:
+ RetVal[Tool][Attr] = Value
+
+ ToolsDef = ''
+ if GlobalData.gOptions.SilentMode and "MAKE" in RetVal:
+ if "FLAGS" not in RetVal["MAKE"]:
+ RetVal["MAKE"]["FLAGS"] = ""
+ RetVal["MAKE"]["FLAGS"] += " -s"
+ MakeFlags = ''
+
+ ToolList = list(RetVal.keys())
+ ToolList.sort()
+ for Tool in ToolList:
+ if Tool == TAB_STAR:
+ continue
+ AttrList = list(RetVal[Tool].keys())
+ if TAB_STAR in ToolList:
+ AttrList += list(RetVal[TAB_STAR])
+ AttrList.sort()
+ for Attr in AttrList:
+ if Attr in RetVal[Tool]:
+ Value = RetVal[Tool][Attr]
+ else:
+ Value = RetVal[TAB_STAR][Attr]
+ if Tool in self._BuildOptionWithToolDef(RetVal) and Attr in self._BuildOptionWithToolDef(RetVal)[Tool]:
+ # check if override is indicated
+ if self._BuildOptionWithToolDef(RetVal)[Tool][Attr].startswith('='):
+ Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr][1:].strip()
+ else:
+ # Do not append PATH or GUID
+ if Attr != 'PATH' and Attr != 'GUID':
+ Value += " " + self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
+ else:
+ Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
+ if Attr == "PATH":
+ # Don't put MAKE definition in the file
+ if Tool != "MAKE":
+ ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
+ elif Attr != "DLL":
+ # Don't put MAKE definition in the file
+ if Tool == "MAKE":
+ if Attr == "FLAGS":
+ MakeFlags = Value
+ else:
+ ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
+ ToolsDef += "\n"
+
+ tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
+ SaveFileOnChange(tool_def_file, ToolsDef, False)
+ for DllPath in DllPathList:
+ os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
+ os.environ["MAKE_FLAGS"] = MakeFlags
+
+ return RetVal
+
+ ## Return the paths of tools
+ @cached_property
+ def ToolDefinitionFile(self):
+ tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
+ if not os.path.exists(tool_def_file):
+ self.ToolDefinition
+ return tool_def_file
+
+ ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
+ @cached_property
+ def ToolChainFamily(self):
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
+ or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:
+ EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
+ % self.ToolChain)
+ RetVal = TAB_COMPILER_MSFT
+ else:
+ RetVal = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]
+ return RetVal
+
+ @cached_property
+ def BuildRuleFamily(self):
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
+ or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:
+ EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
+ % self.ToolChain)
+ return TAB_COMPILER_MSFT
+
+ return ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
+
+ ## Return the build options specific for all modules in this platform
+ @cached_property
+ def BuildOption(self):
+ return self._ExpandBuildOption(self.Platform.BuildOptions)
+
+ def _BuildOptionWithToolDef(self, ToolDef):
+ return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)
+
+ ## Return the build options specific for EDK modules in this platform
+ @cached_property
+ def EdkBuildOption(self):
+ return self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
+
+ ## Return the build options specific for EDKII modules in this platform
+ @cached_property
+ def EdkIIBuildOption(self):
+ return self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
+
+ ## Parse build_rule.txt in Conf Directory.
+ #
+ # @retval BuildRule object
+ #
+ @cached_property
+ def BuildRule(self):
+ BuildRuleFile = None
+ if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
+ BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
+ if not BuildRuleFile:
+ BuildRuleFile = gDefaultBuildRuleFile
+ RetVal = BuildRule(BuildRuleFile)
+ if RetVal._FileVersion == "":
+ RetVal._FileVersion = AutoGenReqBuildRuleVerNum
+ else:
+ if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :
+                # If the build rule's version is less than the version required by the tools, halt the build.
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
+ % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
+ return RetVal
+
+ ## Summarize the packages used by modules in this platform
+ @cached_property
+ def PackageList(self):
+ RetVal = set()
+ for Mb in self._MbList:
+ RetVal.update(Mb.Packages)
+ for lb in Mb.LibInstances:
+ RetVal.update(lb.Packages)
+        # Collect package set information from the INFs listed in the FDF
+ for ModuleFile in self._AsBuildModuleList:
+ if ModuleFile in self.Platform.Modules:
+ continue
+ ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]
+ RetVal.update(ModuleData.Packages)
+ RetVal.update(self.Platform.Packages)
+ return list(RetVal)
+
+ @cached_property
+ def NonDynamicPcdDict(self):
+ return {(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):Pcd for Pcd in self.NonDynamicPcdList}
+
+ ## Get list of non-dynamic PCDs
+ @property
+ def NonDynamicPcdList(self):
+ if not self._NonDynamicPcdList:
+ self.CollectPlatformDynamicPcds()
+ return self._NonDynamicPcdList
+
+ ## Get list of dynamic PCDs
+ @property
+ def DynamicPcdList(self):
+ if not self._DynamicPcdList:
+ self.CollectPlatformDynamicPcds()
+ return self._DynamicPcdList
+
+ ## Generate Token Number for all PCD
+ @cached_property
+ def PcdTokenNumber(self):
+ RetVal = OrderedDict()
+ TokenNumber = 1
+ #
+        # Make Dynamic and DynamicEx PCDs use different TokenNumber ranges.
+        # For example:
+        #
+        # Dynamic PCDs:
+        # TokenNumber 0 ~ 10
+        # DynamicEx PCDs:
+        # TokenNumber 11 ~ 20
+ #
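+        # For example (hypothetical counts): with 3 PEI Dynamic, 2 PEI DynamicEx,
+        # 4 DXE Dynamic and 1 DXE DynamicEx PCDs, the four loops below assign
+        # token numbers 1-3, 4-5, 6-9 and 10 respectively; non-dynamic PCDs get 0.
+        #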
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.NonDynamicPcdList:
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = 0
+ return RetVal
+
+ @cached_property
+ def _MbList(self):
+ ModuleList = []
+ for m in self.Platform.Modules:
+ component = self.Platform.Modules[m]
+ module = self.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain]
+ module.Guid = component.Guid
+ ModuleList.append(module)
+ return ModuleList
+
+ @cached_property
+ def _MaList(self):
+ for ModuleFile in self.Platform.Modules:
+ Ma = ModuleAutoGen(
+ self.Workspace,
+ ModuleFile,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Arch,
+ self.MetaFile,
+ self.DataPipe
+ )
+ self.Platform.Modules[ModuleFile].M = Ma
+ return [x.M for x in self.Platform.Modules.values()]
+
+ ## Summarize ModuleAutoGen objects of all modules to be built for this platform
+ @cached_property
+ def ModuleAutoGenList(self):
+ RetVal = []
+ for Ma in self._MaList:
+ if Ma not in RetVal:
+ RetVal.append(Ma)
+ return RetVal
+
+ ## Summarize ModuleAutoGen objects of all libraries to be built for this platform
+ @cached_property
+ def LibraryAutoGenList(self):
+ RetVal = []
+ for Ma in self._MaList:
+ for La in Ma.LibraryAutoGenList:
+ if La not in RetVal:
+ RetVal.append(La)
+ if Ma not in La.ReferenceModules:
+ La.ReferenceModules.append(Ma)
+ return RetVal
+
+ ## Test if a module is supported by the platform
+ #
+ # An error will be raised directly if the module or its arch is not supported
+ # by the platform or current configuration
+ #
+ def ValidModule(self, Module):
+ return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
+ or Module in self._AsBuildModuleList
+ @cached_property
+ def GetAllModuleInfo(self,WithoutPcd=True):
+ ModuleLibs = set()
+ for m in self.Platform.Modules:
+ module_obj = self.BuildDatabase[m,self.Arch,self.BuildTarget,self.ToolChain]
+ if not bool(module_obj.LibraryClass):
+ Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain,self.MetaFile,EdkLogger)
+ else:
+ Libs = []
+ ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.MetaFile.Path,l.MetaFile.BaseName,l.MetaFile.OriginalPath,l.Arch,True) for l in Libs]))
+ if WithoutPcd and module_obj.PcdIsDriver:
+ continue
+ ModuleLibs.add((m.File,m.Root,m.Path,m.BaseName,m.OriginalPath,module_obj.Arch,bool(module_obj.LibraryClass)))
+
+ return ModuleLibs
+
+ ## Resolve the library classes in a module to library instances
+ #
+ # This method will not only resolve library classes but also sort the library
+    # instances according to their dependency relationships.
+ #
+ # @param Module The module from which the library classes will be resolved
+ #
+ # @retval library_list List of library instances sorted
+ #
+ def ApplyLibraryInstance(self, Module):
+        # Cover the case where a binary INF file is listed in the FDF file but not in the DSC file; return an empty list directly
+ if str(Module) not in self.Platform.Modules:
+ return []
+
+ return GetModuleLibInstances(Module,
+ self.Platform,
+ self.BuildDatabase,
+ self.Arch,
+ self.BuildTarget,
+ self.ToolChain,
+ self.MetaFile,
+ EdkLogger)
+
+ ## Override PCD setting (type, value, ...)
+ #
+ # @param ToPcd The PCD to be overridden
+ # @param FromPcd The PCD overriding from
+ #
+ def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
+ #
+        # In case there are PCDs coming from the FDF file, which have no type given,
+        # ToPcd.Type at this point has the type found in the dependent
+        # package
+ #
+ TokenCName = ToPcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ if FromPcd is not None:
+ if ToPcd.Pending and FromPcd.Type:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type\
+ and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
+ if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type \
+ and ToPcd.Type != FromPcd.Type:
+ if Library:
+ Module = str(Module) + " 's library file (" + str(Library) + ")"
+ EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
+ ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
+ % (ToPcd.TokenSpaceGuidCName, TokenCName,
+ ToPcd.Type, Module, FromPcd.Type, Msg),
+ File=self.MetaFile)
+
+ if FromPcd.MaxDatumSize:
+ ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
+ ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
+ if FromPcd.DefaultValue:
+ ToPcd.DefaultValue = FromPcd.DefaultValue
+ if FromPcd.TokenValue:
+ ToPcd.TokenValue = FromPcd.TokenValue
+ if FromPcd.DatumType:
+ ToPcd.DatumType = FromPcd.DatumType
+ if FromPcd.SkuInfoList:
+ ToPcd.SkuInfoList = FromPcd.SkuInfoList
+ if FromPcd.UserDefinedDefaultStoresFlag:
+ ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
+            # Parse the flexible PCD value format
+ if ToPcd.DefaultValue:
+ try:
+ ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self.Platform._GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
+ File=self.MetaFile)
+
+            # check the validity of the datum
+ IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
+ if not IsValid:
+ EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
+ ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ ToPcd.validateranges = FromPcd.validateranges
+ ToPcd.validlists = FromPcd.validlists
+ ToPcd.expressions = FromPcd.expressions
+ ToPcd.CustomAttribute = FromPcd.CustomAttribute
+
+ if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
+ % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ Value = ToPcd.DefaultValue
+ if not Value:
+ ToPcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ ToPcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ ToPcd.MaxDatumSize = str(len(Value) - 1)
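+            # Worked examples of the size rules above (hypothetical values):
+            #   L"abc"      -> (6 - 2) * 2 = 8 bytes (UCS-2 characters plus terminator)
+            #   {0x01,0x02} -> 2 bytes (one per comma-separated item)
+            #   "abc"       -> 5 - 1 = 4 bytes (ASCII characters plus terminator)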
+
+        # apply the default SKU for dynamic PCDs if the specified one is not available
+ if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
+ and not ToPcd.SkuInfoList:
+ if self.Platform.SkuName in self.Platform.SkuIds:
+ SkuName = self.Platform.SkuName
+ else:
+ SkuName = TAB_DEFAULT
+ ToPcd.SkuInfoList = {
+ SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
+ }
+
+    ## Apply PCD settings defined in the platform to a module
+    #
+    #   @param  Module  The module whose PCD settings will be overridden
+    #
+    #   @retval PCD_list    The list of PCDs with settings from the platform
+ #
+ def ApplyPcdSetting(self, Module, Pcds, Library=""):
+ # for each PCD in module
+ for Name, Guid in Pcds:
+ PcdInModule = Pcds[Name, Guid]
+ # find out the PCD setting in platform
+ if (Name, Guid) in self.Platform.Pcds:
+ PcdInPlatform = self.Platform.Pcds[Name, Guid]
+ else:
+ PcdInPlatform = None
+ # then override the settings if any
+ self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
+ # resolve the VariableGuid value
+ for SkuId in PcdInModule.SkuInfoList:
+ Sku = PcdInModule.SkuInfoList[SkuId]
+ if Sku.VariableGuid == '': continue
+ Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
+ if Sku.VariableGuidValue is None:
+ PackageList = "\n\t".join(str(P) for P in self.PackageList)
+ EdkLogger.error(
+ 'build',
+ RESOURCE_NOT_AVAILABLE,
+ "Value of GUID [%s] is not found in" % Sku.VariableGuid,
+ ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
+ % (Guid, Name, str(Module)),
+ File=self.MetaFile
+ )
+
+ # override PCD settings with module specific setting
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ for Key in PlatformModule.Pcds:
+ if GlobalData.BuildOptionPcd:
+ for pcd in GlobalData.BuildOptionPcd:
+ (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
+ if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
+ PlatformModule.Pcds[Key].DefaultValue = pcdvalue
+ PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
+ break
+ Flag = False
+ if Key in Pcds:
+ ToPcd = Pcds[Key]
+ Flag = True
+ elif Key in GlobalData.MixedPcd:
+ for PcdItem in GlobalData.MixedPcd[Key]:
+ if PcdItem in Pcds:
+ ToPcd = Pcds[PcdItem]
+ Flag = True
+ break
+ if Flag:
+ self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
+ # use PCD value to calculate the MaxDatumSize when it is not specified
+ for Name, Guid in Pcds:
+ Pcd = Pcds[Name, Guid]
+ if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
+ Pcd.MaxSizeUserSet = None
+ Value = Pcd.DefaultValue
+ if not Value:
+ Pcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ Pcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ Pcd.MaxDatumSize = str(len(Value) - 1)
+ return list(Pcds.values())
+
+    ## Append platform build options to a module
+    #
+    #   @param  Module  The module to which the build options will be appended
+    #
+    #   @retval options     The module options appended with the platform build options
+ #
+ def ApplyBuildOption(self, Module):
+        # Get the different options for the different module styles
+ PlatformOptions = self.EdkIIBuildOption
+ ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
+ ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
+ ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
+ else:
+ PlatformModuleOptions = {}
+
+ BuildRuleOrder = None
+ for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ for Tool in Options:
+ for Attr in Options[Tool]:
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ BuildRuleOrder = Options[Tool][Attr]
+
+ AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
+ list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
+ list(self.ToolDefinition.keys()))
+ BuildOptions = defaultdict(lambda: defaultdict(str))
+ for Tool in AllTools:
+ for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ if Tool not in Options:
+ continue
+ for Attr in Options[Tool]:
+ #
+ # Do not generate it in Makefile
+ #
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ continue
+ Value = Options[Tool][Attr]
+ ToolList = [Tool]
+ if Tool == TAB_STAR:
+ ToolList = list(AllTools)
+ ToolList.remove(TAB_STAR)
+ for ExpandedTool in ToolList:
+ # check if override is indicated
+ if Value.startswith('='):
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value[1:])
+ else:
+ if Attr != 'PATH':
+ BuildOptions[ExpandedTool][Attr] += " " + mws.handleWsMacro(Value)
+ else:
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value)
+
+ return BuildOptions, BuildRuleOrder
+
+
+ def GetGlobalBuildOptions(self,Module):
+ ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
+ ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
+
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
+ else:
+ PlatformModuleOptions = {}
+
+ return ModuleTypeOptions,PlatformModuleOptions
+ def ModuleGuid(self,Module):
+ if os.path.basename(Module.MetaFile.File) != os.path.basename(Module.MetaFile.Path):
+ #
+ # Length of GUID is 36
+ #
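+            # For example (hypothetical path): a module staged as
+            # "7e374e25-8e01-4fee-87f2-390c23c606cdMyDriver.inf" yields the GUID
+            # "7e374e25-8e01-4fee-87f2-390c23c606cd" (36 characters, 8-4-4-4-12).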
+ return os.path.basename(Module.MetaFile.Path)[:36]
+ return Module.Guid
+ @cached_property
+ def UniqueBaseName(self):
+ retVal ={}
+ ModuleNameDict = {}
+ UniqueName = {}
+ for Module in self._MbList:
+ unique_base_name = '%s_%s' % (Module.BaseName,self.ModuleGuid(Module))
+ if unique_base_name not in ModuleNameDict:
+ ModuleNameDict[unique_base_name] = []
+ ModuleNameDict[unique_base_name].append(Module.MetaFile)
+ if Module.BaseName not in UniqueName:
+ UniqueName[Module.BaseName] = set()
+ UniqueName[Module.BaseName].add((self.ModuleGuid(Module),Module.MetaFile))
+ for module_paths in ModuleNameDict.values():
+ if len(set(module_paths))>1:
+ samemodules = list(set(module_paths))
+ EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
+ ' %s\n %s' % (samemodules[0], samemodules[1]))
+ for name in UniqueName:
+ Guid_Path = UniqueName[name]
+ if len(Guid_Path) > 1:
+ for guid,mpath in Guid_Path:
+ retVal[(name,mpath)] = '%s_%s' % (name,guid)
+ return retVal
+ ## Expand * in build option key
+ #
+ # @param Options Options to be expanded
+    #   @param ToolDef          Use the specified ToolDef instead of the full version.
+    #                           This is needed during initialization to prevent
+    #                           infinite recursion between BuildOptions,
+ # ToolDefinition, and this function.
+ #
+ # @retval options Options expanded
+ #
+ def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
+ if not ToolDef:
+ ToolDef = self.ToolDefinition
+ BuildOptions = {}
+ FamilyMatch = False
+ FamilyIsNull = True
+
+ OverrideList = {}
+ #
+        # Construct a list containing the build options that need to be overridden.
+ #
+ for Key in Options:
+ #
+ # Key[0] -- tool family
+ # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ #
+ if (Key[0] == self.BuildRuleFamily and
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
+ Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
+ if (Target == self.BuildTarget or Target == TAB_STAR) and\
+ (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
+ (Arch == self.Arch or Arch == TAB_STAR) and\
+ Options[Key].startswith("="):
+
+ if OverrideList.get(Key[1]) is not None:
+ OverrideList.pop(Key[1])
+ OverrideList[Key[1]] = Options[Key]
+
+ #
+ # Use the highest priority value.
+ #
+ if (len(OverrideList) >= 2):
+ KeyList = list(OverrideList.keys())
+ for Index in range(len(KeyList)):
+ NowKey = KeyList[Index]
+ Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
+ for Index1 in range(len(KeyList) - Index - 1):
+ NextKey = KeyList[Index1 + Index + 1]
+ #
+                    # Compare two keys; if one is included by the other, choose the higher priority one
+ #
+ Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
+ if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
+ (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
+ (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
+ (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
+ (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):
+
+ if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NextKey))
+ else:
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NowKey))
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+                # Check whether the module style is EDK or EDKII.
+                # Only append build options for modules of the matching style.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family != "":
+ Found = False
+ if Tool in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if TAB_STAR in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if not Found:
+ continue
+
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+        # The build option family has been checked and needn't be checked again.
+ if FamilyMatch or FamilyIsNull:
+ return BuildOptions
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+                # Check whether the module style is EDK or EDKII.
+                # Only append build options for modules of the matching style.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family == "":
+ continue
+ # option has been added before
+ Found = False
+ if Tool in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if TAB_STAR in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if not Found:
+ continue
+
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+ return BuildOptions
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py
new file mode 100755
index 00000000..895e2a75
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -0,0 +1,630 @@
+## @file
+# This file is used to parse a strings file and create or add to a string database
+# file.
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import re
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from .UniClassObject import *
+from io import BytesIO
+from struct import pack, unpack
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+##
+# Static definitions
+#
+EFI_HII_SIBT_END = '0x00'
+EFI_HII_SIBT_STRING_SCSU = '0x10'
+EFI_HII_SIBT_STRING_SCSU_FONT = '0x11'
+EFI_HII_SIBT_STRINGS_SCSU = '0x12'
+EFI_HII_SIBT_STRINGS_SCSU_FONT = '0x13'
+EFI_HII_SIBT_STRING_UCS2 = '0x14'
+EFI_HII_SIBT_STRING_UCS2_FONT = '0x15'
+EFI_HII_SIBT_STRINGS_UCS2 = '0x16'
+EFI_HII_SIBT_STRINGS_UCS2_FONT = '0x17'
+EFI_HII_SIBT_DUPLICATE = '0x20'
+EFI_HII_SIBT_SKIP2 = '0x21'
+EFI_HII_SIBT_SKIP1 = '0x22'
+EFI_HII_SIBT_EXT1 = '0x30'
+EFI_HII_SIBT_EXT2 = '0x31'
+EFI_HII_SIBT_EXT4 = '0x32'
+EFI_HII_SIBT_FONT = '0x40'
+
+EFI_HII_PACKAGE_STRINGS = '0x04'
+EFI_HII_PACKAGE_FORM = '0x02'
+
+StringPackageType = EFI_HII_PACKAGE_STRINGS
+StringPackageForm = EFI_HII_PACKAGE_FORM
+StringBlockType = EFI_HII_SIBT_STRING_UCS2
+StringSkipType = EFI_HII_SIBT_SKIP2
+
+HexHeader = '0x'
+
+COMMENT = '// '
+DEFINE_STR = '#define'
+COMMENT_DEFINE_STR = COMMENT + DEFINE_STR
+NOT_REFERENCED = 'not referenced'
+COMMENT_NOT_REFERENCED = ' ' + COMMENT + NOT_REFERENCED
+CHAR_ARRAY_DEFIN = 'unsigned char'
+COMMON_FILE_NAME = 'Strings'
+STRING_TOKEN = re.compile('STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
+
+EFI_HII_ARRAY_SIZE_LENGTH = 4
+EFI_HII_PACKAGE_HEADER_LENGTH = 4
+EFI_HII_HDR_SIZE_LENGTH = 4
+EFI_HII_STRING_OFFSET_LENGTH = 4
+EFI_STRING_ID = 1
+EFI_STRING_ID_LENGTH = 2
+EFI_HII_LANGUAGE_WINDOW = 0
+EFI_HII_LANGUAGE_WINDOW_LENGTH = 2
+EFI_HII_LANGUAGE_WINDOW_NUMBER = 16
+EFI_HII_STRING_PACKAGE_HDR_LENGTH = EFI_HII_PACKAGE_HEADER_LENGTH + EFI_HII_HDR_SIZE_LENGTH + EFI_HII_STRING_OFFSET_LENGTH + EFI_HII_LANGUAGE_WINDOW_LENGTH * EFI_HII_LANGUAGE_WINDOW_NUMBER + EFI_STRING_ID_LENGTH
+
+H_C_FILE_HEADER = ['//', \
+ '// DO NOT EDIT -- auto-generated file', \
+ '//', \
+ '// This file is generated by the StrGather utility', \
+ '//']
+LANGUAGE_NAME_STRING_NAME = '$LANGUAGE_NAME'
+PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
+
+## Convert a decimal number to a hex string
+#
+# Convert a decimal number to a formatted hex string with the given number of digits
+# The digit count defaults to 8
+# The hex string starts with "0x"
+# DecToHexStr(1000) is '0x000003E8'
+# DecToHexStr(1000, 6) is '0x0003E8'
+#
+# @param Dec:    The number in decimal format
+# @param Digit:  The needed number of hex digits
+#
+# @retval: The formatted hex string
+#
+def DecToHexStr(Dec, Digit = 8):
+ return '0x{0:0{1}X}'.format(Dec, Digit)
+
+## Convert a decimal number to a hex list
+#
+# Convert a decimal number to a formatted hex list with the given number of digits
+# The digit count defaults to 8
+# DecToHexList(1000) is ['0xE8', '0x03', '0x00', '0x00']
+# DecToHexList(1000, 6) is ['0xE8', '0x03', '0x00']
+#
+# @param Dec:    The number in decimal format
+# @param Digit:  The needed number of hex digits
+#
+# @retval: A list for formatted hex string
+#
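+# Note the little-endian byte order of the returned list, which matches how
+# the multi-byte HII header fields below are serialized; for instance:
+#   DecToHexList(0x0102, 4)  # -> ['0x02', '0x01']
+#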
+def DecToHexList(Dec, Digit = 8):
+ Hex = '{0:0{1}X}'.format(Dec, Digit)
+ return ["0x" + Hex[Bit:Bit + 2] for Bit in range(Digit - 2, -1, -2)]
+
+## Convert an ASCII string to a hex list
+#
+# Convert an ASCII string to a formatted hex list
+# AscToHexList('en-US') is ['0x65', '0x6E', '0x2D', '0x55', '0x53']
+#
+# @param Ascii: The ASCII string
+#
+# @retval: A list for formatted hex string
+#
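+# The try/except below makes both bytes and str inputs work; illustrative calls:
+#   AscToHexList(b'en')  # iterating bytes yields ints   -> ['0x65', '0x6E']
+#   AscToHexList('en')   # falls back to ord() per char  -> ['0x65', '0x6E']
+#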
+def AscToHexList(Ascii):
+ try:
+ return ['0x{0:02X}'.format(Item) for Item in Ascii]
+ except:
+ return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
+
+## Create content of .h file
+#
+# Create content of .h file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+#
+# @retval Str: A string of .h file content
+#
+def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
+ Str = []
+ ValueStartPtr = 60
+ Line = COMMENT_DEFINE_STR + ' ' + LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(0, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+ Line = COMMENT_DEFINE_STR + ' ' + PRINTABLE_LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + PRINTABLE_LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(1, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+ UnusedStr = ''
+
+    # Group the referenced/unreferenced STRING tokens together.
+ for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
+ StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
+ Name = StringItem.StringName
+ Token = StringItem.Token
+ Referenced = StringItem.Referenced
+ if Name is not None:
+ Line = ''
+ if Referenced == True:
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4)
+ else:
+ Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
+ Str = WriteLine(Str, Line)
+ else:
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ else:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ UnusedStr = WriteLine(UnusedStr, Line)
+
+ Str.extend( UnusedStr)
+
+ Str = WriteLine(Str, '')
+ if IsCompatibleMode or UniGenCFlag:
+ Str = WriteLine(Str, 'extern unsigned char ' + BaseName + 'Strings[];')
+ return "".join(Str)
+
+## Create a complete .h file
+#
+# Create a complete .h file with file header and file content
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+#
+# @retval Str: A string of complete .h file
+#
+def CreateHFile(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
+ HFile = WriteLine('', CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag))
+
+ return "".join(HFile)
+
+## Create a buffer to store all items in an array
+#
+# @param BinBuffer Buffer to contain Binary data.
+# @param Array:     The array to be formatted
+#
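+# A minimal usage sketch (values are illustrative):
+#   Buffer = BytesIO()
+#   CreateBinBuffer(Buffer, DecToHexList(4, 4))  # writes b'\x04\x00'
+#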
+def CreateBinBuffer(BinBuffer, Array):
+ for Item in Array:
+ BinBuffer.write(pack("B", int(Item, 16)))
+
+## Create a formatted string of all items in an array
+#
+# Use ',' to join the items in an array, and break to a new line when reaching the width (default is 16)
+#
+# @param Array:     The array to be formatted
+# @param Width: The line length, the default value is set to 16
+#
+# @retval ArrayItem: A string for all formatted array items
+#
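+# For example, CreateArrayItem(['0x01', '0x02', '0x03']) yields a single
+# indented line roughly of the form '  0x01, 0x02, 0x03,' (trailing spaces
+# trimmed), and longer arrays wrap after every 16 items.
+#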
+def CreateArrayItem(Array, Width = 16):
+ MaxLength = Width
+ Index = 0
+ Line = ' '
+ ArrayItem = []
+
+ for Item in Array:
+ if Index < MaxLength:
+ Line = Line + Item + ', '
+ Index = Index + 1
+ else:
+ ArrayItem = WriteLine(ArrayItem, Line)
+ Line = ' ' + Item + ', '
+ Index = 1
+ ArrayItem = Write(ArrayItem, Line.rstrip())
+
+ return "".join(ArrayItem)
+
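+# Illustrative example: CreateArrayItem(['0x01', '0x02', '0x03'], 2) produces
+# two lines:
+#   0x01, 0x02,
+#   0x03,
+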
+## CreateCFileStringValue
+#
+# Create a line with string value
+#
+# @param Value: Value of the string
+#
+# @retval Str: A formatted string with string value
+#
+
+def CreateCFileStringValue(Value):
+ Value = [StringBlockType] + Value
+ Str = WriteLine('', CreateArrayItem(Value))
+
+ return "".join(Str)
+
+## GetFilteredLanguage
+#
+# Apply the "get best language" rules to the UNI language code list
+#
+# @param UniLanguageList: language code definition list in *.UNI file
+# @param LanguageFilterList: language code filter list of RFC4646 format in DSC file
+#
+# @retval UniLanguageListFiltered: the filtered language code
+#
+def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
+ UniLanguageListFiltered = []
+ # if filter list is empty, then consider there is no filter
+ if LanguageFilterList == []:
+ UniLanguageListFiltered = UniLanguageList
+ return UniLanguageListFiltered
+ for Language in LanguageFilterList:
+ # first check for exact match
+ if Language in UniLanguageList:
+ if Language not in UniLanguageListFiltered:
+ UniLanguageListFiltered.append(Language)
+ # find the first one with the same/equivalent primary tag
+ else:
+ if Language.find('-') != -1:
+ PrimaryTag = Language[0:Language.find('-')].lower()
+ else:
+ PrimaryTag = Language
+
+ if len(PrimaryTag) == 3:
+ PrimaryTag = LangConvTable.get(PrimaryTag)
+
+ for UniLanguage in UniLanguageList:
+ if UniLanguage.find('-') != -1:
+ UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
+ else:
+ UniLanguagePrimaryTag = UniLanguage
+
+ if len(UniLanguagePrimaryTag) == 3:
+ UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
+
+ if PrimaryTag == UniLanguagePrimaryTag:
+ if UniLanguage not in UniLanguageListFiltered:
+ UniLanguageListFiltered.append(UniLanguage)
+ break
+ else:
+ # Here is rule 3 for "get best language"
+                # If the tag is not listed in the Unicode file, the default ("en") tag should be used for that language;
+                # for better processing, find the one that best suits it.
+ DefaultTag = 'en'
+ if DefaultTag not in UniLanguageListFiltered:
+ # check whether language code with primary code equivalent with DefaultTag already in the list, if so, use that
+ for UniLanguage in UniLanguageList:
+ if UniLanguage.startswith('en-') or UniLanguage.startswith('eng-'):
+ if UniLanguage not in UniLanguageListFiltered:
+ UniLanguageListFiltered.append(UniLanguage)
+ break
+ else:
+ UniLanguageListFiltered.append(DefaultTag)
+ return UniLanguageListFiltered
+
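+# Illustrative examples (derived from the rules above):
+#   GetFilteredLanguage(['en-US', 'fr-FR'], ['en'])    -> ['en-US']  (primary-tag match)
+#   GetFilteredLanguage(['en-US', 'fr-FR'], ['de-DE']) -> ['en-US']  (rule 3 fallback to 'en')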
+
+## Create content of .c file
+#
+# Create content of .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
+# @param FilterInfo Platform language filter information
+#
+# @retval Str: A string of .c file content
+#
+def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer, FilterInfo):
+ #
+ # Init array length
+ #
+ TotalLength = EFI_HII_ARRAY_SIZE_LENGTH
+ Str = ''
+ Offset = 0
+
+ EDK2Module = FilterInfo[0]
+ if EDK2Module:
+ LanguageFilterList = FilterInfo[1]
+ else:
+ # EDK module is using ISO639-2 format filter, convert to the RFC4646 format
+ LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
+
+ UniLanguageList = []
+ for IndexI in range(len(UniObjectClass.LanguageDef)):
+ UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
+
+ UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
+
+
+ #
+ # Create lines for each language's strings
+ #
+ for IndexI in range(len(UniObjectClass.LanguageDef)):
+ Language = UniObjectClass.LanguageDef[IndexI][0]
+ if Language not in UniLanguageListFiltered:
+ continue
+
+ StringBuffer = BytesIO()
+ StrStringValue = ''
+ ArrayLength = 0
+ NumberOfUseOtherLangDef = 0
+ Index = 0
+ for IndexJ in range(1, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[IndexI][0]])):
+ Item = UniObjectClass.OrderedStringListByToken[Language][IndexJ]
+
+ Name = Item.StringName
+ Value = Item.StringValueByteList
+ Referenced = Item.Referenced
+ Token = Item.Token
+ UseOtherLangDef = Item.UseOtherLangDef
+
+ if UseOtherLangDef != '' and Referenced:
+ NumberOfUseOtherLangDef = NumberOfUseOtherLangDef + 1
+ Index = Index + 1
+ else:
+ if NumberOfUseOtherLangDef > 0:
+ StrStringValue = WriteLine(StrStringValue, CreateArrayItem([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
+ CreateBinBuffer (StringBuffer, ([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
+ NumberOfUseOtherLangDef = 0
+ ArrayLength = ArrayLength + 3
+ if Referenced and Item.Token > 0:
+ Index = Index + 1
+ StrStringValue = WriteLine(StrStringValue, "// %s: %s:%s" % (DecToHexStr(Index, 4), Name, DecToHexStr(Token, 4)))
+ StrStringValue = Write(StrStringValue, CreateCFileStringValue(Value))
+ CreateBinBuffer (StringBuffer, [StringBlockType] + Value)
+ ArrayLength = ArrayLength + Item.Length + 1 # 1 is for the length of string type
+
+ #
+ # EFI_HII_PACKAGE_HEADER
+ #
+ Offset = EFI_HII_STRING_PACKAGE_HDR_LENGTH + len(Language) + 1
+ ArrayLength = Offset + ArrayLength + 1
+
+ #
+ # Create PACKAGE HEADER
+ #
+ Str = WriteLine(Str, '// PACKAGE HEADER\n')
+ TotalLength = TotalLength + ArrayLength
+
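+        # The items below follow the EFI_HII_STRING_PACKAGE_HDR layout from the
+        # UEFI spec: 24-bit package length + 8-bit type, HdrSize,
+        # StringInfoOffset, LanguageWindow[], LanguageName (EFI_STRING_ID),
+        # then the NULL-terminated ASCII language name.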
+ List = DecToHexList(ArrayLength, 6) + \
+ [StringPackageType] + \
+ DecToHexList(Offset) + \
+ DecToHexList(Offset) + \
+ DecToHexList(EFI_HII_LANGUAGE_WINDOW, EFI_HII_LANGUAGE_WINDOW_LENGTH * 2) * EFI_HII_LANGUAGE_WINDOW_NUMBER + \
+ DecToHexList(EFI_STRING_ID, 4) + \
+ AscToHexList(Language) + \
+ DecToHexList(0, 2)
+ Str = WriteLine(Str, CreateArrayItem(List, 16) + '\n')
+
+ #
+ # Create PACKAGE DATA
+ #
+ Str = WriteLine(Str, '// PACKAGE DATA\n')
+ Str = Write(Str, StrStringValue)
+
+ #
+ # Add an EFI_HII_SIBT_END at last
+ #
+ Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
+
+ #
+ # Create binary UNI string
+ #
+ if UniBinBuffer:
+ CreateBinBuffer (UniBinBuffer, List)
+ UniBinBuffer.write (StringBuffer.getvalue())
+ UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
+ StringBuffer.close()
+
+ #
+ # Create line for string variable name
+ # "unsigned char $(BaseName)Strings[] = {"
+ #
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')
+
+ if IsCompatibleMode:
+ #
+ # Create FRAMEWORK_EFI_HII_PACK_HEADER in compatible mode
+ #
+ AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Length')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength + 2)) + '\n')
+ AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Type')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(2, 4)) + '\n')
+ else:
+ #
+ # Create whole array length in UEFI mode
+ #
+ AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
+
+ #
+ # Join package data
+ #
+ AllStr = Write(AllStr, Str)
+
+ return "".join(AllStr)
+
+## Create end of .c file
+#
+# Create end of .c file
+#
+# @retval Str: A string of .c file end
+#
+def CreateCFileEnd():
+ Str = Write('', '};')
+ return Str
+
+## Create a .c file
+#
+# Create a complete .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible Mode
+# @param FilterInfo Platform language filter information
+#
+# @retval CFile: A string of complete .c file
+#
+def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
+ CFile = ''
+ CFile = WriteLine(CFile, CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, None, FilterInfo))
+ CFile = WriteLine(CFile, CreateCFileEnd())
+ return "".join(CFile)
+
+## GetFileList
+#
+# Get a list of all files
+#
+# @param SourceFileList: A list of the source files to search
+# @param IncludeList: A list of all paths to be searched
+# @param SkipList: A list of file extensions to be skipped
+#
+# @retval FileList: A list of all files found
+#
+def GetFileList(SourceFileList, IncludeList, SkipList):
+ if IncludeList is None:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
+
+ FileList = []
+ if SkipList is None:
+ SkipList = []
+
+ for File in SourceFileList:
+ for Dir in IncludeList:
+ if not os.path.exists(Dir):
+ continue
+ File = os.path.join(Dir, File.Path)
+ #
+ # Ignore Dir
+ #
+            if not os.path.isfile(File):
+ continue
+ #
+ # Ignore file listed in skip list
+ #
+ IsSkip = False
+ for Skip in SkipList:
+ if os.path.splitext(File)[1].upper() == Skip.upper():
+ EdkLogger.verbose("Skipped %s for string token uses search" % File)
+ IsSkip = True
+ break
+
+ if not IsSkip:
+ FileList.append(File)
+
+ break
+
+ return FileList
+
+## SearchString
+#
+# Search whether all strings defined in UniObjectClass are referenced
+# All strings that are used should be marked as Referenced
+#
+# @param UniObjectClass: Input UniObjectClass
+# @param FileList: Search path list
+# @param IsCompatibleMode Compatible Mode
+#
+# @retval UniObjectClass: UniObjectClass after searched
+#
+def SearchString(UniObjectClass, FileList, IsCompatibleMode):
+ if FileList == []:
+ return UniObjectClass
+
+ for File in FileList:
+ try:
+            if os.path.isfile(File):
+                with open(File, 'r') as Lines:
+                    for Line in Lines:
+                        for StrName in STRING_TOKEN.findall(Line):
+                            EdkLogger.debug(EdkLogger.DEBUG_5, "Found string identifier: " + StrName)
+                            UniObjectClass.SetStringReferenced(StrName)
+ except:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "SearchString: Error while processing file", File=File, RaiseError=False)
+ raise
+
+ UniObjectClass.ReToken()
+
+ return UniObjectClass
+
+## GetStringFiles
+#
+# This function is used for the UEFI 2.1 spec
+#
+#
+def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
+ if len(UniFilList) > 0:
+ if ShellMode:
+ #
+ # support ISO 639-2 codes in .UNI files of EDK Shell
+ #
+ Uni = UniFileClassObject(sorted(UniFilList, key=lambda x: x.File), True, IncludePathList)
+ else:
+ Uni = UniFileClassObject(sorted(UniFilList, key=lambda x: x.File), IsCompatibleMode, IncludePathList)
+ else:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
+
+ FileList = GetFileList(SourceFileList, IncludeList, SkipList)
+
+ Uni = SearchString(Uni, sorted (FileList), IsCompatibleMode)
+
+ HFile = CreateHFile(BaseName, Uni, IsCompatibleMode, UniGenCFlag)
+ CFile = None
+ if IsCompatibleMode or UniGenCFlag:
+ CFile = CreateCFile(BaseName, Uni, IsCompatibleMode, FilterInfo)
+ if UniGenBinBuffer:
+ CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer, FilterInfo)
+
+ return HFile, CFile
+
+#
+# Write an item
+#
+def Write(Target, Item):
+ if isinstance(Target,str):
+ Target = [Target]
+ if not Target:
+ Target = []
+ if isinstance(Item,list):
+ Target.extend(Item)
+ else:
+ Target.append(Item)
+ return Target
+
+#
+# Write an item with a break line
+#
+def WriteLine(Target, Item):
+ if isinstance(Target,str):
+ Target = [Target]
+ if not Target:
+ Target = []
+ if isinstance(Item, list):
+ Target.extend(Item)
+ else:
+ Target.append(Item)
+ Target.append('\n')
+ return Target
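+
+# Illustrative usage: both helpers accumulate fragments in a list and the
+# caller joins them once at the end:
+#   Buf = WriteLine('', 'line1')   # ['', 'line1', '\n']
+#   Buf = Write(Buf, 'tail')       # ['', 'line1', '\n', 'tail']
+#   Text = "".join(Buf)            # 'line1\ntail'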
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ EdkLogger.info('start')
+
+ UniFileList = [
+        r'C:\Edk\Strings2.uni',
+        r'C:\Edk\Strings.uni'
+ ]
+
+ SrcFileList = []
+ for Root, Dirs, Files in os.walk('C:\\Edk'):
+ for File in Files:
+ SrcFileList.append(File)
+
+ IncludeList = [
+        r'C:\Edk'
+ ]
+
+ SkipList = ['.inf', '.uni']
+ BaseName = 'DriverSample'
+    # IncludeList is reused as the include path list in this demo
+    (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, IncludeList, SkipList, BaseName, True)
+ hfile = open('unistring.h', 'w')
+ cfile = open('unistring.c', 'w')
+ hfile.write(h)
+ cfile.write(c)
+
+ EdkLogger.info('end')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py
new file mode 100755
index 00000000..a13a27b5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -0,0 +1,683 @@
+## @file
+# This file is used to collect all defined strings in multiple uni files
+#
+#
+# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.<BR>
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os, codecs, re
+import distutils.util
+import Common.EdkLogger as EdkLogger
+from io import BytesIO
+from Common.BuildToolError import *
+from Common.StringUtils import GetLineNo
+from Common.Misc import PathClass
+from Common.LongFilePathSupport import LongFilePath
+from Common.GlobalData import *
+##
+# Static definitions
+#
+UNICODE_WIDE_CHAR = u'\\wide'
+UNICODE_NARROW_CHAR = u'\\narrow'
+UNICODE_NON_BREAKING_CHAR = u'\\nbr'
+UNICODE_UNICODE_CR = '\r'
+UNICODE_UNICODE_LF = '\n'
+
+NARROW_CHAR = u'\uFFF0'
+WIDE_CHAR = u'\uFFF1'
+NON_BREAKING_CHAR = u'\uFFF2'
+CR = u'\u000D'
+LF = u'\u000A'
+NULL = u'\u0000'
+TAB = u'\t'
+BACK_SLASH_PLACEHOLDER = u'\u0006'
+
+gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
+
+## Convert a unicode string to a Hex list
+#
+# Convert a unicode string to a Hex list
+# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
+#
+# @param Uni: The python unicode string
+#
+# @retval List: The formatted hex list
+#
+def UniToHexList(Uni):
+ List = []
+ for Item in Uni:
+ Temp = '%04X' % ord(Item)
+ List.append('0x' + Temp[2:4])
+ List.append('0x' + Temp[0:2])
+ return List
+
+LangConvTable = {'eng':'en', 'fra':'fr', \
+ 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
+ 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
+ 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
+ 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
+ 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
+ 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
+ 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
+ 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
+ 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
+ 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
+ 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
+ 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
+ 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
+ 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
+ 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
+ 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
+ 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
+ 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
+ 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
+ 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
+ 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
+ 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
+ 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
+ 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
+ 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
+ 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
+ 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
+ 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
+ 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
+ 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
+ 'zho':'zh', 'zul':'zu'}
+
+## GetLanguageCode
+#
+# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
+# ISO 639-2 language codes supported in compatibility mode
+# RFC 4646 language codes supported in native mode
+#
+# @param LangName: Language codes read from .UNI file
+#
+# @retval LangName: Valid language code in RFC 4646 format or None
+#
+def GetLanguageCode(LangName, IsCompatibleMode, File):
+ length = len(LangName)
+ if IsCompatibleMode:
+ if length == 3 and LangName.isalpha():
+ TempLangName = LangConvTable.get(LangName.lower())
+ if TempLangName is not None:
+ return TempLangName
+ return LangName
+ else:
+ EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid ISO 639-2 language code : %s" % LangName, File)
+
+ if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
+ return LangName
+ if length == 2:
+ if LangName.isalpha():
+ return LangName
+ elif length == 3:
+ if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
+ return LangName
+ elif length == 5:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ elif length >= 6:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
+ return LangName
+
+ EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
+
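+# Illustrative examples:
+#   GetLanguageCode('eng', True, File)    -> 'en'     (ISO 639-2 code converted in compatible mode)
+#   GetLanguageCode('en-US', False, File) -> 'en-US'  (RFC 4646 code accepted in native mode)
+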
+## Ucs2Codec
+#
+# This is only a partial codec implementation. It only supports
+# encoding, and is primarily used to check that all the characters are
+# valid for UCS-2.
+#
+class Ucs2Codec(codecs.Codec):
+ def __init__(self):
+ self.__utf16 = codecs.lookup('utf-16')
+
+ def encode(self, input, errors='strict'):
+ for Char in input:
+ CodePoint = ord(Char)
+ if CodePoint >= 0xd800 and CodePoint <= 0xdfff:
+ raise ValueError("Code Point is in range reserved for " +
+ "UTF-16 surrogate pairs")
+ elif CodePoint > 0xffff:
+ raise ValueError("Code Point too large to encode in UCS-2")
+ return self.__utf16.encode(input)
+
+TheUcs2Codec = Ucs2Codec()
+def Ucs2Search(name):
+ if name in ['ucs-2', 'ucs_2']:
+ return codecs.CodecInfo(
+ name=name,
+ encode=TheUcs2Codec.encode,
+ decode=TheUcs2Codec.decode)
+ else:
+ return None
+codecs.register(Ucs2Search)
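+
+# Illustrative usage of the registered codec (output shown for a little-endian
+# host; the underlying utf-16 codec emits a native-order BOM):
+#   u'A'.encode('ucs-2')          ->  b'\xff\xfeA\x00'
+#   u'\U00010000'.encode('ucs-2') raises ValueError (outside the UCS-2 range)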
+
+## StringDefClassObject
+#
+# A structure for language definition
+#
+class StringDefClassObject(object):
+ def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
+ self.StringName = ''
+ self.StringNameByteList = []
+ self.StringValue = ''
+ self.StringValueByteList = ''
+ self.Token = 0
+ self.Referenced = Referenced
+ self.UseOtherLangDef = UseOtherLangDef
+ self.Length = 0
+
+ if Name is not None:
+ self.StringName = Name
+ self.StringNameByteList = UniToHexList(Name)
+ if Value is not None:
+ self.StringValue = Value + u'\x00' # Add a NULL at string tail
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+ if Token is not None:
+ self.Token = Token
+
+ def __str__(self):
+ return repr(self.StringName) + ' ' + \
+ repr(self.Token) + ' ' + \
+ repr(self.Referenced) + ' ' + \
+ repr(self.StringValue) + ' ' + \
+ repr(self.UseOtherLangDef)
+
+ def UpdateValue(self, Value = None):
+ if Value is not None:
+ self.StringValue = Value + u'\x00' # Add a NULL at string tail
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+
+def StripComments(Line):
+ Comment = u'//'
+ CommentPos = Line.find(Comment)
+ while CommentPos >= 0:
+ # if there are non matched quotes before the comment header
+ # then we are in the middle of a string
+ # but we need to ignore the escaped quotes and backslashes.
+ if ((Line.count(u'"', 0, CommentPos) - Line.count(u'\\"', 0, CommentPos)) & 1) == 1:
+ CommentPos = Line.find (Comment, CommentPos + 1)
+ else:
+ return Line[:CommentPos].strip()
+ return Line.strip()
+
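+# Illustrative examples:
+#   StripComments(u'#langdef en-US "English"  // note')  ->  u'#langdef en-US "English"'
+#   StripComments(u'"http://example" // tail')            ->  u'"http://example"' (the '//' inside quotes is kept)
+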
+## UniFileClassObject
+#
+# A structure for .uni file definition
+#
+class UniFileClassObject(object):
+ def __init__(self, FileList = [], IsCompatibleMode = False, IncludePathList = []):
+ self.FileList = FileList
+ self.Token = 2
+ self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
+ self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
+ self.OrderedStringDict = {} #{ u'LanguageIdentifier' : {StringName:(IndexInList)} }
+ self.OrderedStringListByToken = {} #{ u'LanguageIdentifier' : {Token: StringDefClassObject} }
+ self.IsCompatibleMode = IsCompatibleMode
+ self.IncludePathList = IncludePathList
+ if len(self.FileList) > 0:
+ self.LoadUniFiles(FileList)
+
+ #
+ # Get Language definition
+ #
+ def GetLangDef(self, File, Line):
+ Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
+ if len(Lang) != 3:
+ try:
+ FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
+ except UnicodeError as X:
+ EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File);
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
+ LineNo = GetLineNo(FileIn, Line, False)
+ EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
+ ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
+ else:
+ LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
+ LangPrintName = Lang[2]
+
+ IsLangInDef = False
+ for Item in self.LanguageDef:
+ if Item[0] == LangName:
+ IsLangInDef = True
+                break
+
+ if not IsLangInDef:
+ self.LanguageDef.append([LangName, LangPrintName])
+
+ #
+ # Add language string
+ #
+ self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
+ self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)
+
+ if not IsLangInDef:
+ #
+ # The found STRING tokens will be added into new language string list
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+ if LangName != FirstLangName:
+ for Index in range (2, len (self.OrderedStringList[FirstLangName])):
+ Item = self.OrderedStringList[FirstLangName][Index]
+ if Item.UseOtherLangDef != '':
+ OtherLang = Item.UseOtherLangDef
+ else:
+ OtherLang = FirstLangName
+ self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName, '', Item.Referenced, Item.Token, OtherLang))
+ self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
+ return True
+
+ @staticmethod
+ def OpenUniFile(FileName):
+ #
+ # Read file
+ #
+ try:
+ UniFile = open(FileName, mode='rb')
+ FileIn = UniFile.read()
+ UniFile.close()
+ except:
+            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
+
+ #
+ # Detect Byte Order Mark at beginning of file. Default to UTF-8
+ #
+ Encoding = 'utf-8'
+ if (FileIn.startswith(codecs.BOM_UTF16_BE) or
+ FileIn.startswith(codecs.BOM_UTF16_LE)):
+ Encoding = 'utf-16'
+
+ UniFileClassObject.VerifyUcs2Data(FileIn, FileName, Encoding)
+
+ UniFile = BytesIO(FileIn)
+ Info = codecs.lookup(Encoding)
+ (Reader, Writer) = (Info.streamreader, Info.streamwriter)
+ return codecs.StreamReaderWriter(UniFile, Reader, Writer)
+
+ @staticmethod
+ def VerifyUcs2Data(FileIn, FileName, Encoding):
+ Ucs2Info = codecs.lookup('ucs-2')
+ #
+ # Convert to unicode
+ #
+ try:
+ FileDecoded = codecs.decode(FileIn, Encoding)
+ Ucs2Info.encode(FileDecoded)
+ except:
+ UniFile = BytesIO(FileIn)
+ Info = codecs.lookup(Encoding)
+ (Reader, Writer) = (Info.streamreader, Info.streamwriter)
+ File = codecs.StreamReaderWriter(UniFile, Reader, Writer)
+ LineNumber = 0
+ ErrMsg = lambda Encoding, LineNumber: \
+ '%s contains invalid %s characters on line %d.' % \
+ (FileName, Encoding, LineNumber)
+ while True:
+ LineNumber = LineNumber + 1
+ try:
+ Line = File.readline()
+ if Line == '':
+ EdkLogger.error('Unicode File Parser', PARSER_ERROR,
+ ErrMsg(Encoding, LineNumber))
+ Ucs2Info.encode(Line)
+ except:
+ EdkLogger.error('Unicode File Parser', PARSER_ERROR,
+ ErrMsg('UCS-2', LineNumber))
+
+ #
+ # Get String name and value
+ #
+ def GetStringObject(self, Item):
+ Language = ''
+ Value = ''
+
+ Name = Item.split()[1]
+ # Check the string name
+ if Name != '':
+ MatchString = gIdentifierPattern.match(Name)
+ if MatchString is None:
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
+ LanguageList = Item.split(u'#language ')
+ for IndexI in range(len(LanguageList)):
+ if IndexI == 0:
+ continue
+ else:
+ Language = LanguageList[IndexI].split()[0]
+ Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ self.AddStringToList(Name, Language, Value)
+
+ #
+ # Get include file list and load them
+ #
+ def GetIncludeFile(self, Item, Dir):
+ FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
+ self.LoadUniFile(FileName)
+
+ #
+ # Pre-process before parse .uni file
+ #
+ def PreProcess(self, File):
+ try:
+ FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
+ except UnicodeError as X:
+ EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path);
+ except OSError:
+ EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File.Path);
+
+ Lines = []
+ #
+ # Use unique identifier
+ #
+ for Line in FileIn:
+ Line = Line.strip()
+ Line = Line.replace(u'\\\\', BACK_SLASH_PLACEHOLDER)
+ Line = StripComments(Line)
+
+ #
+ # Ignore empty line
+ #
+ if len(Line) == 0:
+ continue
+
+
+ Line = Line.replace(u'/langdef', u'#langdef')
+ Line = Line.replace(u'/string', u'#string')
+ Line = Line.replace(u'/language', u'#language')
+ Line = Line.replace(u'/include', u'#include')
+
+ Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
+ Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
+ Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
+
+ Line = Line.replace(u'\\r\\n', CR + LF)
+ Line = Line.replace(u'\\n', CR + LF)
+ Line = Line.replace(u'\\r', CR)
+ Line = Line.replace(u'\\t', u' ')
+ Line = Line.replace(u'\t', u' ')
+ Line = Line.replace(u'\\"', u'"')
+ Line = Line.replace(u"\\'", u"'")
+ Line = Line.replace(BACK_SLASH_PLACEHOLDER, u'\\')
+
+ StartPos = Line.find(u'\\x')
+ while (StartPos != -1):
+ EndPos = Line.find(u'\\', StartPos + 1, StartPos + 7)
+ if EndPos != -1 and EndPos - StartPos == 6 :
+                if g4HexChar.match(Line[StartPos + 2 : EndPos]):
+ EndStr = Line[EndPos: ]
+ UniStr = Line[StartPos + 2: EndPos]
+ if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
+                        if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6]):
+ Line = Line[0 : StartPos] + UniStr + EndStr
+ else:
+ Line = Line[0 : StartPos] + UniStr + EndStr[1:]
+ StartPos = Line.find(u'\\x', StartPos + 1)
+
+ IncList = gIncludePattern.findall(Line)
+ if len(IncList) == 1:
+ for Dir in [File.Dir] + self.IncludePathList:
+ IncFile = PathClass(str(IncList[0]), Dir)
+ if os.path.isfile(IncFile.Path):
+ Lines.extend(self.PreProcess(IncFile))
+ break
+ else:
+ EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, Message="Cannot find include file", ExtraData=str(IncList[0]))
+ continue
+
+ Lines.append(Line)
+
+ return Lines
+
+ #
+ # Load a .uni file
+ #
+ def LoadUniFile(self, File = None):
+ if File is None:
+ EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
+ self.File = File
+ #
+ # Process special char in file
+ #
+ Lines = self.PreProcess(File)
+
+ #
+ # Get Unicode Information
+ #
+ for IndexI in range(len(Lines)):
+ Line = Lines[IndexI]
+ if (IndexI + 1) < len(Lines):
+ SecondLine = Lines[IndexI + 1]
+ if (IndexI + 2) < len(Lines):
+ ThirdLine = Lines[IndexI + 2]
+
+ #
+ # Get Language def information
+ #
+ if Line.find(u'#langdef ') >= 0:
+ self.GetLangDef(File, Line)
+ continue
+
+ Name = ''
+ Language = ''
+ Value = ''
+ #
+ # Get string def information format 1 as below
+ #
+ # #string MY_STRING_1
+ # #language eng
+ # My first English string line 1
+ # My first English string line 2
+ # #string MY_STRING_1
+ # #language spa
+ # Mi segunda secuencia 1
+ # Mi segunda secuencia 2
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
+ SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
+ ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
+ Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
+ Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
+ for IndexJ in range(IndexI + 2, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0:
+ Value = Value + Lines[IndexJ]
+ else:
+ IndexI = IndexJ
+ break
+ # Value = Value.replace(u'\r\n', u'')
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ # Check the string name
+ if not self.IsCompatibleMode and Name != '':
+ MatchString = gIdentifierPattern.match(Name)
+ if MatchString is None:
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
+ self.AddStringToList(Name, Language, Value)
+ continue
+
+ #
+ # Get string def information format 2 as below
+ #
+ # #string MY_STRING_1 #language eng "My first English string line 1"
+ # "My first English string line 2"
+ # #language spa "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ # #string MY_STRING_2 #language eng "My first English string line 1"
+ # "My first English string line 2"
+ # #string MY_STRING_2 #language spa "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') >= 0:
+ StringItem = Line
+ for IndexJ in range(IndexI + 1, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') >= 0 and Lines[IndexJ].find(u'#language ') >= 0:
+ IndexI = IndexJ
+ break
+ elif Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') >= 0:
+ StringItem = StringItem + Lines[IndexJ]
+ elif Lines[IndexJ].count(u'\"') >= 2:
+ StringItem = StringItem[ : StringItem.rfind(u'\"')] + Lines[IndexJ][Lines[IndexJ].find(u'\"') + len(u'\"') : ]
+ self.GetStringObject(StringItem)
+ continue
+
+ #
+ # Load multiple .uni files
+ #
+ def LoadUniFiles(self, FileList):
+ if len(FileList) > 0:
+ for File in FileList:
+ self.LoadUniFile(File)
+
+ #
+ # Add a string to list
+ #
+ def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
+ for LangNameItem in self.LanguageDef:
+ if Language == LangNameItem[0]:
+ break
+ else:
+ EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
+ % (Language, Name, self.File))
+
+ if Language not in self.OrderedStringList:
+ self.OrderedStringList[Language] = []
+ self.OrderedStringDict[Language] = {}
+
+ IsAdded = True
+ if Name in self.OrderedStringDict[Language]:
+ IsAdded = False
+ if Value is not None:
+ ItemIndexInList = self.OrderedStringDict[Language][Name]
+ Item = self.OrderedStringList[Language][ItemIndexInList]
+ Item.UpdateValue(Value)
+ Item.UseOtherLangDef = ''
+
+ if IsAdded:
+ Token = len(self.OrderedStringList[Language])
+ if Index == -1:
+ self.OrderedStringList[Language].append(StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
+ self.OrderedStringDict[Language][Name] = Token
+ for LangName in self.LanguageDef:
+ #
+ # New STRING token will be added into all language string lists.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ if LangName[0] != Language:
+ if UseOtherLangDef != '':
+ OtherLangDef = UseOtherLangDef
+ else:
+ OtherLangDef = Language
+ self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name, '', Referenced, Token, OtherLangDef))
+ self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
+ else:
+ self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
+ self.OrderedStringDict[Language][Name] = Index
+
+ #
+ # Set the string as referenced
+ #
+ def SetStringReferenced(self, Name):
+ #
+        # String tokens are added in the same order in all language string lists.
+        # So, only update the status of the string token in the first language string list.
+ #
+ Lang = self.LanguageDef[0][0]
+ if Name in self.OrderedStringDict[Lang]:
+ ItemIndexInList = self.OrderedStringDict[Lang][Name]
+ Item = self.OrderedStringList[Lang][ItemIndexInList]
+ Item.Referenced = True
+
+ #
+ # Search the string in language definition by Name
+ #
+ def FindStringValue(self, Name, Lang):
+ if Name in self.OrderedStringDict[Lang]:
+ ItemIndexInList = self.OrderedStringDict[Lang][Name]
+ return self.OrderedStringList[Lang][ItemIndexInList]
+
+ return None
+
+ #
+ # Search the string in language definition by Token
+ #
+ def FindByToken(self, Token, Lang):
+ for Item in self.OrderedStringList[Lang]:
+ if Item.Token == Token:
+ return Item
+
+ return None
+
+ #
+ # Re-order strings and re-generate tokens
+ #
+ def ReToken(self):
+ #
+        # Re-token all language strings according to the status of the string tokens in the first language string list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+
+        # Convert OrderedStringList to OrderedStringListByToken to facilitate future searches by token
+ for LangNameItem in self.LanguageDef:
+ self.OrderedStringListByToken[LangNameItem[0]] = {}
+
+ #
+        # Use small token values for all referenced string tokens.
+ #
+ RefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+ if FirstLangItem.Referenced == True:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Referenced = True
+ OtherLangItem.Token = RefToken
+ self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
+ RefToken = RefToken + 1
+
+ #
+        # Use big token values for all unreferenced string tokens.
+ #
+ UnRefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+ if FirstLangItem.Referenced == False:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Token = RefToken + UnRefToken
+ self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
+ UnRefToken = UnRefToken + 1
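+
+        # Illustrative result: with first-language tokens A (referenced),
+        # B (unreferenced), C (referenced) defined in that order, ReToken
+        # assigns A=0, C=1 (small tokens) and B=2 (big token).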
+
+ #
+ # Show the instance itself
+ #
+ def ShowMe(self):
+ print(self.LanguageDef)
+ #print self.OrderedStringList
+ for Item in self.OrderedStringList:
+ print(Item)
+ for Member in self.OrderedStringList[Item]:
+ print(str(Member))
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+ a = UniFileClassObject([PathClass("C:\\Edk\\Strings.uni"), PathClass("C:\\Edk\\Strings2.uni")])
+ a.ReToken()
+ a.ShowMe()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
new file mode 100755
index 00000000..7681d48e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
@@ -0,0 +1,280 @@
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+#
+# This file is used to collect the Variable checking information
+#
+
+# #
+# Import Modules
+#
+import os
+from Common.RangeExpression import RangeExpression
+from Common.Misc import *
+from io import BytesIO
+from struct import pack
+from Common.DataType import *
+
+class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
+ def __init__(self):
+ self.var_check_info = []
+
+ def push_back(self, var_check_tab):
+ for tab in self.var_check_info:
+ if tab.equal(var_check_tab):
+ tab.merge(var_check_tab)
+ break
+ else:
+ self.var_check_info.append(var_check_tab)
+
+ def dump(self, dest, Phase):
+
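+        # Note: dump() makes two passes over var_check_info. The first pass
+        # (below) computes each table's 4-byte-aligned Length; the second pass
+        # packs the header, the name bytes, and the valid-data entries into
+        # Buffer using the same alignment rules.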
+ if not os.path.isabs(dest):
+ return
+ if not os.path.exists(dest):
+ os.mkdir(dest)
+ BinFileName = "PcdVarCheck.bin"
+ BinFilePath = os.path.join(dest, BinFileName)
+ Buffer = bytearray()
+ index = 0
+ for var_check_tab in self.var_check_info:
+ index += 1
+ realLength = 0
+ realLength += 32
+ Name = var_check_tab.Name[1:-1]
+ NameChars = Name.split(",")
+ realLength += len(NameChars)
+ if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
+ realLength += (4 - (realLength % 4))
+ itemIndex = 0
+ for item in var_check_tab.validtab:
+ itemIndex += 1
+ realLength += 5
+ for v_data in item.data:
+ if isinstance(v_data, int):
+ realLength += item.StorageWidth
+ else:
+ realLength += item.StorageWidth
+ realLength += item.StorageWidth
+ if (index == len(self.var_check_info)) :
+ if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
+ realLength += (4 - (realLength % 4))
+ else:
+ if realLength % 4:
+ realLength += (4 - (realLength % 4))
+ var_check_tab.Length = realLength
+ realLength = 0
+ index = 0
+ for var_check_tab in self.var_check_info:
+ index += 1
+
+ b = pack("=H", var_check_tab.Revision)
+ Buffer += b
+ realLength += 2
+
+ b = pack("=H", var_check_tab.HeaderLength)
+ Buffer += b
+ realLength += 2
+
+ b = pack("=L", var_check_tab.Length)
+ Buffer += b
+ realLength += 4
+
+ b = pack("=B", var_check_tab.Type)
+ Buffer += b
+ realLength += 1
+
+ for i in range(0, 3):
+ b = pack("=B", var_check_tab.Reserved)
+ Buffer += b
+ realLength += 1
+
+ b = pack("=L", var_check_tab.Attributes)
+ Buffer += b
+ realLength += 4
+
+ Guid = var_check_tab.Guid
+ b = PackByteFormatGUID(Guid)
+ Buffer += b
+ realLength += 16
+
+ Name = var_check_tab.Name[1:-1]
+ NameChars = Name.split(",")
+ for NameChar in NameChars:
+ NameCharNum = int(NameChar, 16)
+ b = pack("=B", NameCharNum)
+ Buffer += b
+ realLength += 1
+
+ if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
+ for i in range(4 - (realLength % 4)):
+ b = pack("=B", var_check_tab.pad)
+ Buffer += b
+ realLength += 1
+ itemIndex = 0
+ for item in var_check_tab.validtab:
+ itemIndex += 1
+
+ b = pack("=B", item.Type)
+ Buffer += b
+ realLength += 1
+
+ b = pack("=B", item.Length)
+ Buffer += b
+ realLength += 1
+
+ b = pack("=H", int(item.VarOffset, 16))
+ Buffer += b
+ realLength += 2
+
+ b = pack("=B", item.StorageWidth)
+ Buffer += b
+ realLength += 1
+ for v_data in item.data:
+ if isinstance(v_data, int):
+ b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
+ Buffer += b
+ realLength += item.StorageWidth
+ else:
+ b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data[0])
+ Buffer += b
+ realLength += item.StorageWidth
+ b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data[1])
+ Buffer += b
+ realLength += item.StorageWidth
+
+ if (index == len(self.var_check_info)) :
+ if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
+ for i in range(4 - (realLength % 4)):
+ b = pack("=B", var_check_tab.pad)
+ Buffer += b
+ realLength += 1
+ else:
+ if realLength % 4:
+ for i in range(4 - (realLength % 4)):
+ b = pack("=B", var_check_tab.pad)
+ Buffer += b
+ realLength += 1
+
+ DbFile = BytesIO()
+ if Phase == 'DXE' and os.path.exists(BinFilePath):
+ BinFile = open(BinFilePath, "rb")
+ BinBuffer = BinFile.read()
+ BinFile.close()
+ BinBufferSize = len(BinBuffer)
+ if (BinBufferSize % 4):
+ for i in range(4 - (BinBufferSize % 4)):
+ b = pack("=B", VAR_CHECK_PCD_VARIABLE_TAB.pad)
+ BinBuffer += b
+ Buffer = BinBuffer + Buffer
+ DbFile.write(Buffer)
+ SaveFileOnChange(BinFilePath, DbFile.getvalue(), True)
+
+
+class VAR_CHECK_PCD_VARIABLE_TAB(object):
+ pad = 0xDA
+ def __init__(self, TokenSpaceGuid, PcdCName):
+ self.Revision = 0x0001
+ self.HeaderLength = 0
+ self.Length = 0 # Length include this header
+ self.Type = 0
+ self.Reserved = 0
+ self.Attributes = 0x00000000
+ self.Guid = eval("[" + TokenSpaceGuid.replace("{", "").replace("}", "") + "]")
+ self.Name = PcdCName
+ self.validtab = []
+
+ def UpdateSize(self):
+ self.HeaderLength = 32 + len(self.Name.split(","))
+ self.Length = 32 + len(self.Name.split(",")) + self.GetValidTabLen()
+
+ def GetValidTabLen(self):
+ validtablen = 0
+ for item in self.validtab:
+ validtablen += item.Length
+ return validtablen
+
+ def SetAttributes(self, attributes):
+ self.Attributes = attributes
+
+ def push_back(self, valid_obj):
+ if valid_obj is not None:
+ self.validtab.append(valid_obj)
+
+ def equal(self, varchecktab):
+ if self.Guid == varchecktab.Guid and self.Name == varchecktab.Name:
+ return True
+ else:
+ return False
+
+ def merge(self, varchecktab):
+ for validobj in varchecktab.validtab:
+ if validobj in self.validtab:
+ continue
+ self.validtab.append(validobj)
+ self.UpdateSize()
+
+
+class VAR_CHECK_PCD_VALID_OBJ(object):
+ def __init__(self, VarOffset, data, PcdDataType):
+ self.Type = 1
+ self.Length = 0 # Length include this header
+ self.VarOffset = VarOffset
+ self.PcdDataType = PcdDataType.strip()
+ self.rawdata = data
+ self.data = set()
+ try:
+ self.StorageWidth = MAX_SIZE_TYPE[self.PcdDataType]
+ self.ValidData = True
+ except:
+ self.StorageWidth = 0
+ self.ValidData = False
+
+ def __eq__(self, validObj):
+ return validObj and self.VarOffset == validObj.VarOffset
+
+class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
+ def __init__(self, VarOffset, validlist, PcdDataType):
+ super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
+ self.Type = 1
+ valid_num_list = []
+ for item in self.rawdata:
+ valid_num_list.extend(item.split(','))
+
+ for valid_num in valid_num_list:
+ valid_num = valid_num.strip()
+
+ if valid_num.startswith('0x') or valid_num.startswith('0X'):
+ self.data.add(int(valid_num, 16))
+ else:
+ self.data.add(int(valid_num))
+
+
+ self.Length = 5 + len(self.data) * self.StorageWidth
+
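+# Illustrative example: VAR_CHECK_PCD_VALID_LIST('0x4', ['1,2', '0x10'], 'UINT8')
+# ends with data == {1, 2, 16} and Length == 5 + 3 * 1 (assuming MAX_SIZE_TYPE
+# maps 'UINT8' to a StorageWidth of 1).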
+
+class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
+ def __init__(self, VarOffset, validrange, PcdDataType):
+ super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
+ self.Type = 2
+ RangeExpr = ""
+        for i, item in enumerate(self.rawdata):
+            if i == 0:
+                RangeExpr = "( " + item + " )"
+            else:
+                RangeExpr = RangeExpr + " OR ( " + item + " )"
+ range_result = RangeExpression(RangeExpr, self.PcdDataType)(True)
+ for rangelist in range_result:
+ for obj in rangelist.pop():
+ self.data.add((obj.start, obj.end))
+ self.Length = 5 + len(self.data) * 2 * self.StorageWidth
+
+
+def GetValidationObject(PcdClass, VarOffset):
+ if PcdClass.validateranges:
+ return VAR_CHECK_PCD_VALID_RANGE(VarOffset, PcdClass.validateranges, PcdClass.DatumType)
+ if PcdClass.validlists:
+ return VAR_CHECK_PCD_VALID_LIST(VarOffset, PcdClass.validlists, PcdClass.DatumType)
+ else:
+ return None
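+
+# Typical usage (illustrative sketch): callers pass a PCD object and a variable
+# offset and get back a range object, a list object, or None:
+#   ValidObj = GetValidationObject(Pcd, VarOffset)
+#   if ValidObj is not None:
+#       var_check_tab.push_back(ValidObj)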
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
new file mode 100755
index 00000000..93624d0f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
@@ -0,0 +1,971 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import os.path as path
+import hashlib
+from collections import defaultdict
+from GenFds.FdfParser import FdfParser
+from Workspace.WorkspaceCommon import GetModuleLibInstances
+from AutoGen import GenMake
+from AutoGen.AutoGen import AutoGen
+from AutoGen.PlatformAutoGen import PlatformAutoGen
+from AutoGen.BuildEngine import gDefaultBuildRuleFile
+from Common.ToolDefClassObject import gDefaultToolsDefFile
+from Common.StringUtils import NormPath
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+import json
+
+## Regular expression for splitting Dependency Expression string into tokens
+gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")
+
+## Regular expression for match: PCD(xxxx.yyy)
+gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")
+
+## Workspace AutoGen class
+#
+# This class is used mainly to control the whole platform build for different
+# architecture. This class will generate top level makefile.
+#
+class WorkspaceAutoGen(AutoGen):
+ # call super().__init__ then call the worker function with different parameter count
+ def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ if not hasattr(self, "_Init"):
+ self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+ self._Init = True
+
+ ## Initialize WorkspaceAutoGen
+ #
+ # @param WorkspaceDir Root directory of workspace
+ # @param ActivePlatform Meta-file of active platform
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param ArchList List of architecture of current build
+ # @param MetaFileDb Database containing meta-files
+ # @param BuildConfig Configuration of build
+ # @param ToolDefinition Tool chain definitions
+ # @param FlashDefinitionFile File of flash definition
+ # @param Fds FD list to be generated
+ # @param Fvs FV list to be generated
+ # @param Caps Capsule list to be generated
+ # @param SkuId SKU id from command line
+ #
+ def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
+ BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
+ Progress=None, BuildModule=None):
+ self.BuildDatabase = MetaFileDb
+ self.MetaFile = ActivePlatform
+ self.WorkspaceDir = WorkspaceDir
+ self.Platform = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]
+ GlobalData.gActivePlatform = self.Platform
+ self.BuildTarget = Target
+ self.ToolChain = Toolchain
+ self.ArchList = ArchList
+ self.SkuId = SkuId
+ self.UniFlag = UniFlag
+
+ self.TargetTxt = BuildConfig
+ self.ToolDef = ToolDefinition
+ self.FdfFile = FlashDefinitionFile
+ self.FdTargetList = Fds if Fds else []
+ self.FvTargetList = Fvs if Fvs else []
+ self.CapTargetList = Caps if Caps else []
+ self.AutoGenObjectList = []
+ self._GuidDict = {}
+
+        # there are many relative directory operations, so ...
+ os.chdir(self.WorkspaceDir)
+
+ self.MergeArch()
+ self.ValidateBuildTarget()
+
+ EdkLogger.info("")
+ if self.ArchList:
+ EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
+ EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
+ EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
+
+ EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
+ if BuildModule:
+ EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))
+
+ if self.FdfFile:
+ EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))
+
+ EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)
+
+ if Progress:
+ Progress.Start("\nProcessing meta-data")
+        #
+        # Collect platform GUIDs to support GUID names in FdfParser.
+        #
+        self.CollectPlatformGuids()
+        #
+        # Mark that the build is now in the AutoGen phase
+        #
+        GlobalData.gAutoGenPhase = True
+ self.ProcessModuleFromPdf()
+ self.ProcessPcdType()
+ self.ProcessMixedPcd()
+ self.VerifyPcdsFromFDF()
+ self.CollectAllPcds()
+ for Pa in self.AutoGenObjectList:
+ Pa.FillData_LibConstPcd()
+ self.GeneratePkgLevelHash()
+ #
+ # Check PCDs token value conflict in each DEC file.
+ #
+ self._CheckAllPcdsTokenValueConflict()
+ #
+ # Check PCD type and definition between DSC and DEC
+ #
+ self._CheckPcdDefineAndType()
+
+ self.CreateBuildOptionsFile()
+ self.CreatePcdTokenNumberFile()
+ self.GeneratePlatformLevelHash()
+
+ #
+ # Merge Arch
+ #
+ def MergeArch(self):
+ if not self.ArchList:
+ ArchList = set(self.Platform.SupArchList)
+ else:
+ ArchList = set(self.ArchList) & set(self.Platform.SupArchList)
+ if not ArchList:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))
+ elif self.ArchList and len(ArchList) != len(self.ArchList):
+ SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
+ EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
+ % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
+ self.ArchList = tuple(ArchList)
+
+ # Validate build target
+ def ValidateBuildTarget(self):
+ if self.BuildTarget not in self.Platform.BuildTargets:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
+ % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
+
+ def CollectPlatformGuids(self):
+ oriInfList = []
+ oriPkgSet = set()
+ PlatformPkg = set()
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ oriInfList = Platform.Modules
+ for ModuleFile in oriInfList:
+ ModuleData = self.BuildDatabase[ModuleFile, Platform._Arch, Platform._Target, Platform._Toolchain]
+ oriPkgSet.update(ModuleData.Packages)
+ for Pkg in oriPkgSet:
+ Guids = Pkg.Guids
+ GlobalData.gGuidDict.update(Guids)
+ if Platform.Packages:
+ PlatformPkg.update(Platform.Packages)
+ for Pkg in PlatformPkg:
+ Guids = Pkg.Guids
+ GlobalData.gGuidDict.update(Guids)
+
+ @cached_property
+ def FdfProfile(self):
+ if not self.FdfFile:
+ self.FdfFile = self.Platform.FlashDefinition
+
+ FdfProfile = None
+ if self.FdfFile:
+ Fdf = FdfParser(self.FdfFile.Path)
+ Fdf.ParseFile()
+ GlobalData.gFdfParser = Fdf
+ if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
+ FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
+ for FdRegion in FdDict.RegionList:
+ if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
+ if int(FdRegion.Offset) % 8 != 0:
+ EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
+ FdfProfile = Fdf.Profile
+ else:
+ if self.FdTargetList:
+ EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))
+ self.FdTargetList = []
+ if self.FvTargetList:
+ EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))
+ self.FvTargetList = []
+ if self.CapTargetList:
+ EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))
+ self.CapTargetList = []
+
+ return FdfProfile
+
+ def ProcessModuleFromPdf(self):
+
+ if self.FdfProfile:
+ for fvname in self.FvTargetList:
+ if fvname.upper() not in self.FdfProfile.FvDict:
+ EdkLogger.error("build", OPTION_VALUE_INVALID,
+ "No such an FV in FDF file: %s" % fvname)
+
+            # A DSC file may use FILE_GUID to override a module; Platform.Modules then uses FILE_GUIDmodule.inf as the key,
+            # but the path (self.MetaFile.Path) is the real path.
+ for key in self.FdfProfile.InfDict:
+ if key == 'ArchTBD':
+ MetaFile_cache = defaultdict(set)
+ for Arch in self.ArchList:
+ Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ for Pkey in Current_Platform_cache.Modules:
+ MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)
+ for Inf in self.FdfProfile.InfDict[key]:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
+ for Arch in self.ArchList:
+ if ModuleFile in MetaFile_cache[Arch]:
+ break
+ else:
+ ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
+ if not ModuleData.IsBinaryModule:
+ EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
+
+ else:
+ for Arch in self.ArchList:
+ if Arch == key:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ MetaFileList = set()
+ for Pkey in Platform.Modules:
+ MetaFileList.add(Platform.Modules[Pkey].MetaFile)
+ for Inf in self.FdfProfile.InfDict[key]:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
+ if ModuleFile in MetaFileList:
+ continue
+ ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
+ if not ModuleData.IsBinaryModule:
+ EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
+
+
+
+ # parse FDF file to get PCDs in it, if any
+ def VerifyPcdsFromFDF(self):
+
+ if self.FdfProfile:
+ PcdSet = self.FdfProfile.PcdDict
+ self.VerifyPcdDeclearation(PcdSet)
+
+ def ProcessPcdType(self):
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
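+            # The bare Platform.Pcds expression below appears intentional: it
+            # forces the (cached) Pcds property to be evaluated before the
+            # PCD type resolution that follows.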
+ Platform.Pcds
+ # generate the SourcePcdDict and BinaryPcdDict
+ Libs = []
+ for BuildData in list(self.BuildDatabase._CACHE_.values()):
+ if BuildData.Arch != Arch:
+ continue
+ if BuildData.MetaFile.Ext == '.inf' and str(BuildData) in Platform.Modules :
+ Libs.extend(GetModuleLibInstances(BuildData, Platform,
+ self.BuildDatabase,
+ Arch,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Platform.MetaFile,
+ EdkLogger
+ ))
+ for BuildData in list(self.BuildDatabase._CACHE_.values()):
+ if BuildData.Arch != Arch:
+ continue
+ if BuildData.MetaFile.Ext == '.inf':
+ for key in BuildData.Pcds:
+ if BuildData.Pcds[key].Pending:
+ if key in Platform.Pcds:
+ PcdInPlatform = Platform.Pcds[key]
+ if PcdInPlatform.Type:
+ BuildData.Pcds[key].Type = PcdInPlatform.Type
+ BuildData.Pcds[key].Pending = False
+
+ if BuildData.MetaFile in Platform.Modules:
+ PlatformModule = Platform.Modules[str(BuildData.MetaFile)]
+ if key in PlatformModule.Pcds:
+ PcdInPlatform = PlatformModule.Pcds[key]
+ if PcdInPlatform.Type:
+ BuildData.Pcds[key].Type = PcdInPlatform.Type
+ BuildData.Pcds[key].Pending = False
+ else:
+                    # PCD used in a library: take the PCD type from the referencing module if the type is still pending
+ if BuildData.Pcds[key].Pending:
+ if bool(BuildData.LibraryClass):
+ if BuildData in set(Libs):
+ ReferenceModules = BuildData.ReferenceModules
+ for ReferenceModule in ReferenceModules:
+ if ReferenceModule.MetaFile in Platform.Modules:
+ RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]
+ if key in RefPlatformModule.Pcds:
+ PcdInReferenceModule = RefPlatformModule.Pcds[key]
+ if PcdInReferenceModule.Type:
+ BuildData.Pcds[key].Type = PcdInReferenceModule.Type
+ BuildData.Pcds[key].Pending = False
+ break
+
+ def ProcessMixedPcd(self):
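+        # A "mixed" PCD is one consumed with more than one access method across
+        # the modules of a build. Illustrative example: if (PcdFoo, GuidBar) is
+        # DynamicEx in one binary INF and PatchableInModule in another, it is
+        # recorded in GlobalData.MixedPcd and renamed per access method, e.g.
+        # PcdFoo_DynamicEx and PcdFoo_PatchableInModule.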
+ for Arch in self.ArchList:
+ SourcePcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set(),TAB_PCDS_DYNAMIC:set(),TAB_PCDS_FIXED_AT_BUILD:set()}
+ BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set()}
+ SourcePcdDict_Keys = SourcePcdDict.keys()
+ BinaryPcdDict_Keys = BinaryPcdDict.keys()
+
+ # generate the SourcePcdDict and BinaryPcdDict
+
+ for BuildData in list(self.BuildDatabase._CACHE_.values()):
+ if BuildData.Arch != Arch:
+ continue
+ if BuildData.MetaFile.Ext == '.inf':
+ for key in BuildData.Pcds:
+ if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:
+ if BuildData.IsBinaryModule:
+ BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+ else:
+ SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+
+ elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:
+ if BuildData.MetaFile.Ext == '.inf':
+ if BuildData.IsBinaryModule:
+ BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+ else:
+ SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+
+ elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:
+ SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+ elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:
+ SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+
+ #
+ # A PCD can only use one type for all source modules
+ #
+ for i in SourcePcdDict_Keys:
+ for j in SourcePcdDict_Keys:
+ if i != j:
+ Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])
+ if len(Intersections) > 0:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+                            "Building modules from source INFs, the following PCDs use %s and %s access methods. They must be corrected to use only one access method." % (i, j),
+ ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)
+ )
+
+ #
+            # Intersect the binary PCDs to find mixed PCDs
+ #
+ for i in BinaryPcdDict_Keys:
+ for j in BinaryPcdDict_Keys:
+ if i != j:
+ Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])
+ for item in Intersections:
+ NewPcd1 = (item[0] + '_' + i, item[1])
+ NewPcd2 = (item[0] + '_' + j, item[1])
+ if item not in GlobalData.MixedPcd:
+ GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
+ else:
+ if NewPcd1 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd1)
+ if NewPcd2 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd2)
+
+ #
+            # Intersect the source PCDs and binary PCDs to find mixed PCDs
+ #
+ for i in SourcePcdDict_Keys:
+ for j in BinaryPcdDict_Keys:
+ if i != j:
+ Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])
+ for item in Intersections:
+ NewPcd1 = (item[0] + '_' + i, item[1])
+ NewPcd2 = (item[0] + '_' + j, item[1])
+ if item not in GlobalData.MixedPcd:
+ GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
+ else:
+ if NewPcd1 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd1)
+ if NewPcd2 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd2)
+
+ BuildData = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ for key in BuildData.Pcds:
+ for SinglePcd in GlobalData.MixedPcd:
+ if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
+ for item in GlobalData.MixedPcd[SinglePcd]:
+ Pcd_Type = item[0].split('_')[-1]
+ if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \
+ (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):
+ Value = BuildData.Pcds[key]
+ Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type
+ if len(key) == 2:
+ newkey = (Value.TokenCName, key[1])
+ elif len(key) == 3:
+ newkey = (Value.TokenCName, key[1], key[2])
+ del BuildData.Pcds[key]
+ BuildData.Pcds[newkey] = Value
+ break
+ break
+
+ if self.FdfProfile:
+ PcdSet = self.FdfProfile.PcdDict
+ # handle the mixed pcd in FDF file
+ for key in PcdSet:
+ if key in GlobalData.MixedPcd:
+ Value = PcdSet[key]
+ del PcdSet[key]
+ for item in GlobalData.MixedPcd[key]:
+ PcdSet[item] = Value
+
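+    # Editor's sketch (illustrative only): detecting a "mixed" PCD is pairwise
+    # set intersection over the per-access-method buckets built above. Names
+    # here are hypothetical.
+    @staticmethod
+    def _SketchFindMixedPcds(PcdDict):
+        # PcdDict maps an access method to a set of (TokenCName, TokenSpaceGuid).
+        Mixed = set()
+        Methods = list(PcdDict)
+        for Index, i in enumerate(Methods):
+            for j in Methods[Index + 1:]:
+                Mixed |= PcdDict[i] & PcdDict[j]
+        return Mixed   # each entry is later renamed TokenCName_<Type> per method
+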
+    # Collect package set information from the INFs referenced by the FDF
+ @cached_property
+ def PkgSet(self):
+ if not self.FdfFile:
+ self.FdfFile = self.Platform.FlashDefinition
+
+ if self.FdfFile:
+ ModuleList = self.FdfProfile.InfList
+ else:
+ ModuleList = []
+ Pkgs = {}
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ PkgSet = set()
+ for mb in [self.BuildDatabase[m, Arch, self.BuildTarget, self.ToolChain] for m in Platform.Modules]:
+ PkgSet.update(mb.Packages)
+ for Inf in ModuleList:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
+ if ModuleFile in Platform.Modules:
+ continue
+ ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
+ PkgSet.update(ModuleData.Packages)
+ PkgSet.update(Platform.Packages)
+ Pkgs[Arch] = list(PkgSet)
+ return Pkgs
+
+    def VerifyPcdDeclearation(self, PcdSet):
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ Pkgs = self.PkgSet[Arch]
+ DecPcds = set()
+ DecPcdsKey = set()
+ for Pkg in Pkgs:
+ for Pcd in Pkg.Pcds:
+ DecPcds.add((Pcd[0], Pcd[1]))
+ DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))
+
+ Platform.SkuName = self.SkuId
+            for Name, Guid, Fields in PcdSet:
+                if (Name, Guid) not in DecPcds:
+                    EdkLogger.error(
+                        'build',
+                        PARSER_ERROR,
+                        "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),
+                        File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][0],
+                        Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][1]
+                    )
+                else:
+                    # Check whether a Dynamic or DynamicEx PCD is used in the FDF file. If so, break the build and report an error.
+                    if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \
+                        or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \
+                        or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:
+                        continue
+                    elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:
+                        EdkLogger.error(
+                                'build',
+                                PARSER_ERROR,
+                                "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),
+                                File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][0],
+                                Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][1]
+ )
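+
+    # Editor's sketch (illustrative only): the type gate above boils down to a
+    # membership test over (Name, Guid, Type) triples collected from the DEC
+    # packages. The literal type names below stand in for the TAB_PCDS_*
+    # constants used by the real check.
+    @staticmethod
+    def _SketchFdfPcdTypeAllowed(DecPcdsKey, Name, Guid):
+        Allowed = ('FixedAtBuild', 'PatchableInModule', 'FeatureFlag')
+        return any((Name, Guid, T) in DecPcdsKey for T in Allowed)
+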
+ def CollectAllPcds(self):
+
+ for Arch in self.ArchList:
+ Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
+ #
+ # Explicitly collect platform's dynamic PCDs
+ #
+ Pa.CollectPlatformDynamicPcds()
+ Pa.CollectFixedAtBuildPcds()
+ self.AutoGenObjectList.append(Pa)
+ # We need to calculate the PcdTokenNumber after all Arch Pcds are collected.
+ for Arch in self.ArchList:
+ #Pcd TokenNumber
+ Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
+ self.UpdateModuleDataPipe(Arch, {"PCD_TNUM":Pa.PcdTokenNumber})
+
+ def UpdateModuleDataPipe(self,arch, attr_dict):
+ for (Target, Toolchain, Arch, MetaFile) in AutoGen.Cache():
+ if Arch != arch:
+ continue
+            try:
+                AutoGen.Cache()[(Target, Toolchain, Arch, MetaFile)].DataPipe.DataContainer = attr_dict
+            except Exception:
+                # Skip cached AutoGen objects that do not carry a DataPipe
+                pass
+ #
+ # Generate Package level hash value
+ #
+ def GeneratePkgLevelHash(self):
+ for Arch in self.ArchList:
+ GlobalData.gPackageHash = {}
+ if GlobalData.gUseHashCache:
+ for Pkg in self.PkgSet[Arch]:
+ self._GenPkgLevelHash(Pkg)
+
+
+ def CreateBuildOptionsFile(self):
+ #
+ # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.
+ #
+ content = 'gCommandLineDefines: '
+ content += str(GlobalData.gCommandLineDefines)
+ content += TAB_LINE_BREAK
+ content += 'BuildOptionPcd: '
+ content += str(GlobalData.BuildOptionPcd)
+ content += TAB_LINE_BREAK
+ content += 'Active Platform: '
+ content += str(self.Platform)
+ content += TAB_LINE_BREAK
+ if self.FdfFile:
+ content += 'Flash Image Definition: '
+ content += str(self.FdfFile)
+ content += TAB_LINE_BREAK
+ SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
+
+ def CreatePcdTokenNumberFile(self):
+ #
+ # Create PcdToken Number file for Dynamic/DynamicEx Pcd.
+ #
+ PcdTokenNumber = 'PcdTokenNumber: '
+ Pa = self.AutoGenObjectList[0]
+ if Pa.PcdTokenNumber:
+ if Pa.DynamicPcdList:
+ for Pcd in Pa.DynamicPcdList:
+ PcdTokenNumber += TAB_LINE_BREAK
+ PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
+ PcdTokenNumber += ' : '
+ PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
+ SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False)
+
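+    # Editor's sketch (illustrative only): each line of the PcdTokenNumber file
+    # written above pairs a (TokenCName, TokenSpaceGuid) tuple with its assigned
+    # number, e.g. "('PcdExample', 'gExampleTokenSpaceGuid') : 1". The helper
+    # below reproduces that format with hypothetical inputs.
+    @staticmethod
+    def _SketchTokenNumberLine(TokenCName, TokenSpaceGuidCName, Number):
+        return '%s : %s' % (str((TokenCName, TokenSpaceGuidCName)), str(Number))
+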
+ def GeneratePlatformLevelHash(self):
+ #
+ # Get set of workspace metafiles
+ #
+ AllWorkSpaceMetaFiles = self._GetMetaFiles(self.BuildTarget, self.ToolChain)
+ AllWorkSpaceMetaFileList = sorted(AllWorkSpaceMetaFiles, key=lambda x: str(x))
+ #
+ # Retrieve latest modified time of all metafiles
+ #
+ SrcTimeStamp = 0
+        for f in AllWorkSpaceMetaFiles:
+            # os.stat(f)[8] is st_mtime, the file's last modification time
+            if os.stat(f)[8] > SrcTimeStamp:
+                SrcTimeStamp = os.stat(f)[8]
+ self._SrcTimeStamp = SrcTimeStamp
+
+ if GlobalData.gUseHashCache:
+ FileList = []
+ m = hashlib.md5()
+ for file in AllWorkSpaceMetaFileList:
+ if file.endswith('.dec'):
+ continue
+                with open(file, 'rb') as f:
+                    Content = f.read()
+                m.update(Content)
+ FileList.append((str(file), hashlib.md5(Content).hexdigest()))
+
+ HashDir = path.join(self.BuildDir, "Hash_Platform")
+ HashFile = path.join(HashDir, 'Platform.hash.' + m.hexdigest())
+ SaveFileOnChange(HashFile, m.hexdigest(), False)
+ HashChainFile = path.join(HashDir, 'Platform.hashchain.' + m.hexdigest())
+ GlobalData.gPlatformHashFile = HashChainFile
+ try:
+ with open(HashChainFile, 'w') as f:
+ json.dump(FileList, f, indent=2)
+ except:
+                EdkLogger.quiet("[cache warning]: failed to save hash chain file: %s" % HashChainFile)
+
+ if GlobalData.gBinCacheDest:
+ # Copy platform hash files to cache destination
+ FileDir = path.join(GlobalData.gBinCacheDest, self.OutputDir, self.BuildTarget + "_" + self.ToolChain, "Hash_Platform")
+ CacheFileDir = FileDir
+ CreateDirectory(CacheFileDir)
+ CopyFileOnChange(HashFile, CacheFileDir)
+ CopyFileOnChange(HashChainFile, CacheFileDir)
+
+ #
+ # Write metafile list to build directory
+ #
+ AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')
+ if os.path.exists (AutoGenFilePath):
+ os.remove(AutoGenFilePath)
+ if not os.path.exists(self.BuildDir):
+ os.makedirs(self.BuildDir)
+ with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
+ for f in AllWorkSpaceMetaFileList:
+ print(f, file=file)
+ return True
+
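+    # Editor's sketch (illustrative only): the platform and package hashing in
+    # this class follows one pattern, shown below with hypothetical inputs:
+    # feed every file into one running md5 for the combined digest, while
+    # recording a per-file digest for the hash chain JSON.
+    @staticmethod
+    def _SketchHashFiles(FilePaths):
+        import hashlib
+        Combined = hashlib.md5()
+        Chain = []
+        for Path in FilePaths:
+            with open(Path, 'rb') as f:
+                Content = f.read()
+            Combined.update(Content)
+            Chain.append((Path, hashlib.md5(Content).hexdigest()))
+        return Combined.hexdigest(), Chain
+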
+ def _GenPkgLevelHash(self, Pkg):
+ if Pkg.PackageName in GlobalData.gPackageHash:
+ return
+
+ PkgDir = os.path.join(self.BuildDir, Pkg.Arch, "Hash_Pkg", Pkg.PackageName)
+ CreateDirectory(PkgDir)
+ FileList = []
+ m = hashlib.md5()
+        # Get the .dec file's hash value
+        with open(Pkg.MetaFile.Path, 'rb') as f:
+            Content = f.read()
+        m.update(Content)
+ FileList.append((str(Pkg.MetaFile.Path), hashlib.md5(Content).hexdigest()))
+ # Get include files hash value
+ if Pkg.Includes:
+ for inc in sorted(Pkg.Includes, key=lambda x: str(x)):
+ for Root, Dirs, Files in os.walk(str(inc)):
+ for File in sorted(Files):
+ File_Path = os.path.join(Root, File)
+                    with open(File_Path, 'rb') as f:
+                        Content = f.read()
+                    m.update(Content)
+ FileList.append((str(File_Path), hashlib.md5(Content).hexdigest()))
+ GlobalData.gPackageHash[Pkg.PackageName] = m.hexdigest()
+
+ HashDir = PkgDir
+ HashFile = path.join(HashDir, Pkg.PackageName + '.hash.' + m.hexdigest())
+ SaveFileOnChange(HashFile, m.hexdigest(), False)
+ HashChainFile = path.join(HashDir, Pkg.PackageName + '.hashchain.' + m.hexdigest())
+ GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)] = HashChainFile
+ try:
+ with open(HashChainFile, 'w') as f:
+ json.dump(FileList, f, indent=2)
+ except:
+            EdkLogger.quiet("[cache warning]: failed to save hash chain file: %s" % HashChainFile)
+
+ if GlobalData.gBinCacheDest:
+ # Copy Pkg hash files to cache destination dir
+ FileDir = path.join(GlobalData.gBinCacheDest, self.OutputDir, self.BuildTarget + "_" + self.ToolChain, Pkg.Arch, "Hash_Pkg", Pkg.PackageName)
+ CacheFileDir = FileDir
+ CreateDirectory(CacheFileDir)
+ CopyFileOnChange(HashFile, CacheFileDir)
+ CopyFileOnChange(HashChainFile, CacheFileDir)
+
+ def _GetMetaFiles(self, Target, Toolchain):
+ AllWorkSpaceMetaFiles = set()
+ #
+ # add fdf
+ #
+ if self.FdfFile:
+ AllWorkSpaceMetaFiles.add (self.FdfFile.Path)
+ for f in GlobalData.gFdfParser.GetAllIncludedFile():
+ AllWorkSpaceMetaFiles.add (f.FileName)
+ #
+ # add dsc
+ #
+ AllWorkSpaceMetaFiles.add(self.MetaFile.Path)
+
+ #
+ # add build_rule.txt & tools_def.txt
+ #
+ AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))
+ AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))
+
+        #
+        # add BuildOptions metafile
+        #
+ AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))
+
+        #
+        # add PcdTokenNumber file for Dynamic/DynamicEx PCDs
+        #
+ AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))
+
+ for Pa in self.AutoGenObjectList:
+ AllWorkSpaceMetaFiles.add(Pa.ToolDefinitionFile)
+
+ for Arch in self.ArchList:
+ #
+ # add dec
+ #
+ for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:
+ AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)
+
+ #
+ # add included dsc
+ #
+ for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:
+ AllWorkSpaceMetaFiles.add(filePath.Path)
+
+ return AllWorkSpaceMetaFiles
+
+ def _CheckPcdDefineAndType(self):
+ PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC,
+ TAB_PCDS_DYNAMIC_EX}
+
+        # This dict stores PCDs that are not used by any module for the specified arches
+ UnusedPcd = OrderedDict()
+ for Pa in self.AutoGenObjectList:
+ # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
+ for Pcd in Pa.Platform.Pcds:
+ PcdType = Pa.Platform.Pcds[Pcd].Type
+
+ # If no PCD type, this PCD comes from FDF
+ if not PcdType:
+ continue
+
+ # Try to remove Hii and Vpd suffix
+ if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
+ PcdType = TAB_PCDS_DYNAMIC_EX
+ elif PcdType.startswith(TAB_PCDS_DYNAMIC):
+ PcdType = TAB_PCDS_DYNAMIC
+
+ for Package in Pa.PackageList:
+ # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
+ if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
+ break
+ for Type in PcdTypeSet:
+ if (Pcd[0], Pcd[1], Type) in Package.Pcds:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
+ % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
+ ExtraData=None
+ )
+ return
+ else:
+ UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)
+
+ for Pcd in UnusedPcd:
+ EdkLogger.warn(
+ 'build',
+ "The PCD was not specified by any INF module in the platform for the given architecture.\n"
+ "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
+ % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
+ ExtraData=None
+ )
+
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))
+
+ ## Return the directory to store FV files
+ @cached_property
+ def FvDir(self):
+ return path.join(self.BuildDir, TAB_FV_DIRECTORY)
+
+ ## Return the directory to store all intermediate and final files built
+ @cached_property
+ def BuildDir(self):
+ return self.AutoGenObjectList[0].BuildDir
+
+ ## Return the build output directory platform specifies
+ @cached_property
+ def OutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return platform name
+ @cached_property
+ def Name(self):
+ return self.Platform.PlatformName
+
+ ## Return meta-file GUID
+ @cached_property
+ def Guid(self):
+ return self.Platform.Guid
+
+ ## Return platform version
+ @cached_property
+ def Version(self):
+ return self.Platform.Version
+
+ ## Return paths of tools
+ @cached_property
+ def ToolDefinition(self):
+ return self.AutoGenObjectList[0].ToolDefinition
+
+ ## Return directory of platform makefile
+ #
+ # @retval string Makefile directory
+ #
+ @cached_property
+ def MakeFileDir(self):
+ return self.BuildDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ # BuildCommand should be all the same. So just get one from platform AutoGen
+ return self.AutoGenObjectList[0].BuildCommand
+
+    ## Check for PCD token value conflicts in each DEC file.
+    #
+    # Breaks the build and raises an error message when two PCDs conflict.
+ #
+ # @return None
+ #
+ def _CheckAllPcdsTokenValueConflict(self):
+ for Pa in self.AutoGenObjectList:
+ for Package in Pa.PackageList:
+ PcdList = list(Package.Pcds.values())
+ PcdList.sort(key=lambda x: int(x.TokenValue, 0))
+ Count = 0
+ while (Count < len(PcdList) - 1) :
+ Item = PcdList[Count]
+ ItemNext = PcdList[Count + 1]
+ #
+                    # Make sure the TokenValue is unique within the same token space
+ #
+ if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):
+ SameTokenValuePcdList = []
+ SameTokenValuePcdList.append(Item)
+ SameTokenValuePcdList.append(ItemNext)
+ RemainPcdListLength = len(PcdList) - Count - 2
+ for ValueSameCount in range(RemainPcdListLength):
+ if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):
+ SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
+ else:
+                                break
+ #
+ # Sort same token value PCD list with TokenGuid and TokenCName
+ #
+ SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
+ SameTokenValuePcdListCount = 0
+ while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
+ Flag = False
+ TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
+ TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
+
+ if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
+ for PcdItem in GlobalData.MixedPcd:
+ if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \
+ (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ Flag = True
+ if not Flag:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+                                    "The TokenValue [%s] of PCD [%s.%s] conflicts with [%s.%s] in %s"\
+ % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
+ ExtraData=None
+ )
+ SameTokenValuePcdListCount += 1
+ Count += SameTokenValuePcdListCount
+ Count += 1
+
+ PcdList = list(Package.Pcds.values())
+ PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
+ Count = 0
+ while (Count < len(PcdList) - 1) :
+ Item = PcdList[Count]
+ ItemNext = PcdList[Count + 1]
+ #
+                    # Check that PCDs with the same TokenSpaceGuidCName.TokenCName also have the same token value.
+ #
+ if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+                        "The TokenValue [%s] of PCD [%s.%s], defined in two places in %s, must be the same."\
+ % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
+ ExtraData=None
+ )
+ Count += 1
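+
+    # Editor's sketch (illustrative only): both checks above are "sort, then
+    # compare neighbours" scans. Given hypothetical (TokenValue, GuidCName,
+    # TokenCName) triples with an integer token value, duplicates surface as
+    # adjacent pairs after sorting.
+    @staticmethod
+    def _SketchFindTokenValueConflicts(Triples):
+        Conflicts = []
+        Ordered = sorted(Triples, key=lambda t: t[0])
+        for A, B in zip(Ordered, Ordered[1:]):
+            if A[0] == B[0] and (A[1], A[2]) != (B[1], B[2]):
+                Conflicts.append((A, B))   # same token value, different PCDs
+        return Conflicts
+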
+ ## Generate fds command
+ @property
+ def GenFdsCommand(self):
+ return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()
+
+ @property
+ def GenFdsCommandDict(self):
+ FdsCommandDict = {}
+ LogLevel = EdkLogger.GetLevel()
+ if LogLevel == EdkLogger.VERBOSE:
+ FdsCommandDict["verbose"] = True
+ elif LogLevel <= EdkLogger.DEBUG_9:
+ FdsCommandDict["debug"] = LogLevel - 1
+ elif LogLevel == EdkLogger.QUIET:
+ FdsCommandDict["quiet"] = True
+
+ FdsCommandDict["GenfdsMultiThread"] = GlobalData.gEnableGenfdsMultiThread
+ if GlobalData.gIgnoreSource:
+ FdsCommandDict["IgnoreSources"] = True
+
+ FdsCommandDict["OptionPcd"] = []
+ for pcd in GlobalData.BuildOptionPcd:
+ if pcd[2]:
+ pcdname = '.'.join(pcd[0:3])
+ else:
+ pcdname = '.'.join(pcd[0:2])
+ if pcd[3].startswith('{'):
+ FdsCommandDict["OptionPcd"].append(pcdname + '=' + 'H' + '"' + pcd[3] + '"')
+ else:
+ FdsCommandDict["OptionPcd"].append(pcdname + '=' + pcd[3])
+
+ MacroList = []
+ # macros passed to GenFds
+ MacroDict = {}
+ MacroDict.update(GlobalData.gGlobalDefines)
+ MacroDict.update(GlobalData.gCommandLineDefines)
+ for MacroName in MacroDict:
+ if MacroDict[MacroName] != "":
+ MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
+ else:
+ MacroList.append('"%s"' % MacroName)
+ FdsCommandDict["macro"] = MacroList
+
+ FdsCommandDict["fdf_file"] = [self.FdfFile]
+ FdsCommandDict["build_target"] = self.BuildTarget
+ FdsCommandDict["toolchain_tag"] = self.ToolChain
+ FdsCommandDict["active_platform"] = str(self)
+
+ FdsCommandDict["conf_directory"] = GlobalData.gConfDirectory
+ FdsCommandDict["build_architecture_list"] = ','.join(self.ArchList)
+ FdsCommandDict["platform_build_directory"] = self.BuildDir
+
+ FdsCommandDict["fd"] = self.FdTargetList
+ FdsCommandDict["fv"] = self.FvTargetList
+ FdsCommandDict["cap"] = self.CapTargetList
+ return FdsCommandDict
+
+ ## Create makefile for the platform and modules in it
+ #
+ # @param CreateDepsMakeFile Flag indicating if the makefile for
+ # modules will be created as well
+ #
+ def CreateMakeFile(self, CreateDepsMakeFile=False):
+ if not CreateDepsMakeFile:
+ return
+ for Pa in self.AutoGenObjectList:
+ Pa.CreateMakeFile(CreateDepsMakeFile)
+
+ ## Create autogen code for platform and modules
+ #
+    # Since there's no autogen code for the platform, this method does nothing
+    # if CreateDepsCodeFile is set to False.
+ #
+ # @param CreateDepsCodeFile Flag indicating if creating module's
+ # autogen code file or not
+ #
+ def CreateCodeFile(self, CreateDepsCodeFile=False):
+ if not CreateDepsCodeFile:
+ return
+ for Pa in self.AutoGenObjectList:
+ Pa.CreateCodeFile(CreateDepsCodeFile)
+
+    ## Create the AsBuilt INF file for the platform
+ #
+ def CreateAsBuiltInf(self):
+ return
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py
new file mode 100644
index 00000000..069f49cc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py
@@ -0,0 +1,11 @@
+## @file
+# Python 'AutoGen' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+__all__ = ["AutoGen"]
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/BPDG.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/BPDG.py
new file mode 100755
index 00000000..cf46de5d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/BPDG.py
@@ -0,0 +1,158 @@
+## @file
+# Intel Binary Product Data Generation Tool (Intel BPDG).
+# This tool provides a simple process for the creation of a binary file containing read-only
+# configuration data for EDK II platforms that contain Dynamic and DynamicEx PCDs described
+# in VPD sections. It also provides an option for specifying an alternate name for a mapping
+# file of the PCD layout, for use during the build when the platform integrator selects
+# automatic offset calculation.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import sys
+import encodings.ascii
+
+from optparse import OptionParser
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.BuildVersion import gBUILD_VERSION
+
+from . import StringTable as st
+from . import GenVpd
+
+PROJECT_NAME = st.LBL_BPDG_LONG_UNI
+VERSION = (st.LBL_BPDG_VERSION + " Build " + gBUILD_VERSION)
+
+## Tool entrance method
+#
+# This method dispatches to specific methods according to the command line options.
+# If no error is found, a zero value is returned so the caller of this tool can
+# tell whether it executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def main():
+ global Options, Args
+
+ # Initialize log system
+ EdkLogger.Initialize()
+ Options, Args = MyOptionParser()
+
+ ReturnCode = 0
+
+ if Options.opt_verbose:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Options.opt_quiet:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Options.debug_level is not None:
+ EdkLogger.SetLevel(Options.debug_level + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ if Options.bin_filename is None:
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
+ if Options.filename is None:
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
+
+ Force = False
+ if Options.opt_force is not None:
+ Force = True
+
+ if (Args[0] is not None) :
+ StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
+ else :
+        EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contains the VPD PCD info.",
+ None)
+
+ return ReturnCode
+
+
+## Parse command line options
+#
+# Using standard Python module optparse to parse command line option of this tool.
+#
+# @retval options An optparse.Values object containing the parsed options
+# @retval args Target of BPDG command
+#
+def MyOptionParser():
+ #
+ # Process command line firstly.
+ #
+ parser = OptionParser(version="%s - Version %s" % (PROJECT_NAME, VERSION),
+ description='',
+ prog='BPDG',
+ usage=st.LBL_BPDG_USAGE
+ )
+ parser.add_option('-d', '--debug', action='store', type="int", dest='debug_level',
+ help=st.MSG_OPTION_DEBUG_LEVEL)
+ parser.add_option('-v', '--verbose', action='store_true', dest='opt_verbose',
+ help=st.MSG_OPTION_VERBOSE)
+ parser.add_option('-q', '--quiet', action='store_true', dest='opt_quiet', default=False,
+ help=st.MSG_OPTION_QUIET)
+ parser.add_option('-o', '--vpd-filename', action='store', dest='bin_filename',
+ help=st.MSG_OPTION_VPD_FILENAME)
+ parser.add_option('-m', '--map-filename', action='store', dest='filename',
+ help=st.MSG_OPTION_MAP_FILENAME)
+ parser.add_option('-f', '--force', action='store_true', dest='opt_force',
+ help=st.MSG_OPTION_FORCE)
+
+ (options, args) = parser.parse_args()
+ if len(args) == 0:
+        EdkLogger.info("Please specify the filename.txt file which contains the VPD PCD info!")
+ EdkLogger.info(parser.usage)
+ sys.exit(1)
+ return options, args
+
+
+## Start BPDG and call the main functions
+#
+# This method mainly focus on call GenVPD class member functions to complete
+# BPDG's target. It will process VpdFile override, and provide the interface file
+# information.
+#
+# @Param InputFileName The filename include the vpd type pcd information
+# @param MapFileName The filename of map file that stores vpd type pcd information.
+# This file will be generated by the BPDG tool after fix the offset
+# and adjust the offset to make the pcd data aligned.
+# @param VpdFileName The filename of Vpd file that hold vpd pcd information.
+# @param Force Override the exist Vpdfile or not.
+#
+def StartBpdg(InputFileName, MapFileName, VpdFileName, Force):
+ if os.path.exists(VpdFileName) and not Force:
+        print("\nFile %s already exists. Overwrite (Yes/No)? [Y]: " % VpdFileName)
+ choice = sys.stdin.readline()
+ if choice.strip().lower() not in ['y', 'yes', '']:
+ return
+
+ GenVPD = GenVpd.GenVPD (InputFileName, MapFileName, VpdFileName)
+
+ EdkLogger.info('%-24s = %s' % ("VPD input data file: ", InputFileName))
+ EdkLogger.info('%-24s = %s' % ("VPD output map file: ", MapFileName))
+ EdkLogger.info('%-24s = %s' % ("VPD output binary file: ", VpdFileName))
+
+ GenVPD.ParserInputFile()
+ GenVPD.FormatFileLine()
+ GenVPD.FixVpdOffset()
+ GenVPD.GenerateVpdFile(MapFileName, VpdFileName)
+
+    EdkLogger.info("- VPD PCD fixup done! -")
+
+if __name__ == '__main__':
+ try:
+ r = main()
+ except FatalError as e:
+ r = e
+ ## 0-127 is a safe return range, and 1 is a standard default error
+ if r < 0 or r > 127: r = 1
+ sys.exit(r)
+
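+# Editor's note (illustrative usage, with hypothetical file names): a typical
+# invocation looks like
+#
+#   BPDG -o Vpd.bin -m Vpd.map VpdPcdList.txt
+#
+# which parses VpdPcdList.txt, fixes any TAB_STAR offsets, and writes the
+# binary and map outputs.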
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/GenVpd.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/GenVpd.py
new file mode 100755
index 00000000..9e887c0a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/GenVpd.py
@@ -0,0 +1,689 @@
+## @file
+# This file includes the GenVpd class, which fixes the offsets of VPD type PCDs, and the
+# PcdEntry class, which describes and processes each VPD type PCD entry.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+from io import BytesIO
+from . import StringTable as st
+import array
+import re
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from struct import *
+from Common.DataType import MAX_SIZE_TYPE, MAX_VAL_TYPE, TAB_STAR
+import Common.EdkLogger as EdkLogger
+import Common.BuildToolError as BuildToolError
+
+_FORMAT_CHAR = {1: 'B',
+ 2: 'H',
+ 4: 'I',
+ 8: 'Q'
+ }
+
+## The VPD PCD data structure used to store and process each VPD PCD entry.
+#
+# This class contains methods to format and pack a PCD's value.
+#
+class PcdEntry:
+ def __init__(self, PcdCName, SkuId,PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
+ PcdBinOffset=None, PcdBinSize=None, Alignment=None):
+ self.PcdCName = PcdCName.strip()
+ self.SkuId = SkuId.strip()
+ self.PcdOffset = PcdOffset.strip()
+ self.PcdSize = PcdSize.strip()
+ self.PcdValue = PcdValue.strip()
+ self.Lineno = Lineno.strip()
+ self.FileName = FileName.strip()
+ self.PcdUnpackValue = PcdUnpackValue
+ self.PcdBinOffset = PcdBinOffset
+ self.PcdBinSize = PcdBinSize
+ self.Alignment = Alignment
+
+        if self.PcdValue == '' :
+            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+                            "Invalid PCD format (Name: %s File: %s Line: %s), no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
+
+        if self.PcdOffset == '' :
+            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+                            "Invalid PCD format (Name: %s File: %s Line: %s), no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))
+
+        if self.PcdSize == '' :
+            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+                            "Invalid PCD format (Name: %s File: %s Line: %s), no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))
+
+ self._GenOffsetValue ()
+
+    ## Analyze the string value to judge whether the PCD's datum type is Boolean.
+ #
+ # @param ValueString PCD's value
+ # @param Size PCD's size
+ #
+ # @retval True PCD's datum type is Boolean
+ # @retval False PCD's datum type is not Boolean.
+ #
+ def _IsBoolean(self, ValueString, Size):
+ if (Size == "1"):
+ if ValueString.upper() in ["TRUE", "FALSE"]:
+ return True
+ elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:
+ return True
+
+ return False
+
+    ## Convert the PCD's offset from string to integer.
+    #
+    # This function tries to convert the offset value from string to integer,
+    # accepting both hexadecimal and decimal.
+ #
+ def _GenOffsetValue(self):
+ if self.PcdOffset != TAB_STAR:
+ try:
+ self.PcdBinOffset = int (self.PcdOffset)
+ except:
+ try:
+ self.PcdBinOffset = int(self.PcdOffset, 16)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
+
+    ## Pack a Boolean type VPD PCD's value from string to binary form.
+    #
+    # @param ValueString The Boolean type string to pack.
+ #
+ #
+ def _PackBooleanValue(self, ValueString):
+ if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
+ try:
+ self.PcdValue = pack(_FORMAT_CHAR[1], 1)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+ else:
+ try:
+ self.PcdValue = pack(_FORMAT_CHAR[1], 0)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
+    ## Pack an integer type VPD PCD's value into binary form.
+    #
+    # @param IntValue The integer value to pack.
+    # @param Size     The PCD size in bytes.
+ #
+ #
+ def _PackIntValue(self, IntValue, Size):
+ if Size not in _FORMAT_CHAR:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
+
+ for Type, MaxSize in MAX_SIZE_TYPE.items():
+ if Type == 'BOOLEAN':
+ continue
+ if Size == MaxSize:
+ if IntValue < 0:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "PCD can't be set to negative value %d for PCD %s in %s datum type(File: %s Line: %s)." % (
+ IntValue, self.PcdCName, Type, self.FileName, self.Lineno))
+ elif IntValue > MAX_VAL_TYPE[Type]:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type %s for PCD %s(File: %s Line: %s)." % (
+ IntValue, Type, self.PcdCName, self.FileName, self.Lineno))
+
+ try:
+ self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
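+    # Editor's sketch (illustrative only): the size-to-format mapping above is
+    # the standard struct one; e.g. a 2-byte PCD packs with 'H'. Byte order is
+    # the host's native order (little-endian on the x86 hosts these tools
+    # target), and an unsupported size raises KeyError in this sketch.
+    @staticmethod
+    def _SketchPackInt(IntValue, Size):
+        return pack({1: 'B', 2: 'H', 4: 'I', 8: 'Q'}[Size], IntValue)
+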
+    ## Pack a VOID* type VPD PCD's value from string to binary form.
+    #
+    # The VOID* type string is divided into 3 sub-types:
+    # 1: L"String"/L'String', Unicode type string.
+    # 2: "String"/'String', Ascii type string.
+    # 3: {bytearray}, only byte-array is supported.
+    #
+    # @param ValueString The VOID* type string to pack.
+ #
+ def _PackPtrValue(self, ValueString, Size):
+ if ValueString.startswith('L"') or ValueString.startswith("L'"):
+ self._PackUnicode(ValueString, Size)
+ elif ValueString.startswith('{') and ValueString.endswith('}'):
+ self._PackByteArray(ValueString, Size)
+ elif (ValueString.startswith('"') and ValueString.endswith('"')) or (ValueString.startswith("'") and ValueString.endswith("'")):
+ self._PackString(ValueString, Size)
+ else:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
+
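+    # Editor's sketch (illustrative only): for the Unicode sub-type above, the
+    # packed layout is UCS-2 little-endian with zero padding up to Size, the
+    # same shape _PackUnicode produces for L"..." values.
+    @staticmethod
+    def _SketchPackUcs2(Text, Size):
+        Encoded = Text.encode('utf-16-le')   # 2 bytes per BMP character
+        return Encoded + b'\x00' * (Size - len(Encoded))
+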
+ ## Pack an Ascii PCD value.
+ #
+    # An Ascii string for a PCD should be in the format ""/''.
+ #
+ def _PackString(self, ValueString, Size):
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
+ if (ValueString == ""):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
+
+ QuotedFlag = True
+ if ValueString.startswith("'"):
+ QuotedFlag = False
+
+ ValueString = ValueString[1:-1]
+ # No null-terminator in 'string'
+ if (QuotedFlag and len(ValueString) + 1 > Size) or (not QuotedFlag and len(ValueString) > Size):
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
+                            "PCD value string %s exceeds the size %d (File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
+ try:
+ self.PcdValue = pack('%ds' % Size, ValueString.encode('utf-8'))
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
+ ## Pack a byte-array PCD value.
+ #
+    # A byte-array for a PCD should be in the format {0x01, 0x02, ...}.
+ #
+ def _PackByteArray(self, ValueString, Size):
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
+ if (ValueString == ""):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
+
+ ValueString = ValueString.strip()
+ ValueString = ValueString.lstrip('{').strip('}')
+ ValueList = ValueString.split(',')
+ ValueList = [item.strip() for item in ValueList]
+
+ if len(ValueList) > Size:
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
+ "The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
+
+ ReturnArray = array.array('B')
+
+ for Index in range(len(ValueList)):
+ Value = None
+ if ValueList[Index].lower().startswith('0x'):
+ # translate hex value
+ try:
+ Value = int(ValueList[Index], 16)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \
+ (ValueList[Index], ValueString, self.FileName, self.Lineno))
+ else:
+ # translate decimal value
+ try:
+ Value = int(ValueList[Index], 10)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \
+ (ValueList[Index], ValueString, self.FileName, self.Lineno))
+
+ if Value > 255:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" % \
+                                "The value item %s in byte array %s is not in the range 0 ~ 0xFF (File: %s Line: %s)" % \
+
+ ReturnArray.append(Value)
+
+ for Index in range(len(ValueList), Size):
+ ReturnArray.append(0)
+
+ self.PcdValue = ReturnArray.tolist()
+
+ ## Pack a unicode PCD value into byte array.
+ #
+    # A unicode string for a PCD should be in the format L""/L''.
+ #
+ def _PackUnicode(self, UnicodeString, Size):
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % \
+ (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
+
+ QuotedFlag = True
+ if UnicodeString.startswith("L'"):
+ QuotedFlag = False
+ UnicodeString = UnicodeString[2:-1]
+
+ # No null-terminator in L'string'
+ if (QuotedFlag and (len(UnicodeString) + 1) * 2 > Size) or (not QuotedFlag and len(UnicodeString) * 2 > Size):
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
+                            "The unicode string %s is too large for size %s (File: %s Line: %s)" % \
+ (UnicodeString, Size, self.FileName, self.Lineno))
+
+ ReturnArray = array.array('B')
+ for Value in UnicodeString:
+ try:
+ ReturnArray.append(ord(Value))
+ ReturnArray.append(0)
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
+ (Value, UnicodeString, self.FileName, self.Lineno))
+
+ for Index in range(len(UnicodeString) * 2, Size):
+ ReturnArray.append(0)
+
+ self.PcdValue = ReturnArray.tolist()
+
+
+
+## The class implementing the BPDG VPD PCD offset fix process
+#
+# The VPD PCD offset fix process includes:
+# 1. Parse the input guided.txt file and store it in the data structure;
+# 2. Format the input file data to remove unused lines;
+# 3. Fix offsets if needed;
+# 4. Generate the output files, including the guided.map and guided.bin files.
+#
+class GenVPD :
+    ## Constructor of GenVPD
+    #
+    # Initialize an object of GenVPD.
+    # @param InputFileName The file name that includes the VPD type PCD information
+    # @param MapFileName The file name of the map file that stores VPD type PCD information.
+    # This file is generated by the BPDG tool after fixing the offset
+    # and adjusting it so that the PCD data is aligned.
+    # @param VpdFileName The file name of the VPD file that holds the VPD PCD information.
+ #
+ def __init__(self, InputFileName, MapFileName, VpdFileName):
+ self.InputFileName = InputFileName
+ self.MapFileName = MapFileName
+ self.VpdFileName = VpdFileName
+ self.FileLinesList = []
+ self.PcdFixedOffsetSizeList = []
+ self.PcdUnknownOffsetList = []
+ try:
+ fInputfile = open(InputFileName, "r")
+ try:
+ self.FileLinesList = fInputfile.readlines()
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" % InputFileName, None)
+ finally:
+ fInputfile.close()
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)
+
+ ##
+    # Parse the input file generated by the build tool. Convert each PCD's value
+    # from string to its real format, and remove the useless lines from the input file.
+ #
+ def ParserInputFile (self):
+ count = 0
+ for line in self.FileLinesList:
+ # Strip "\r\n" generated by readlines ().
+ line = line.strip()
+ line = line.rstrip(os.linesep)
+
+ # Skip the comment line
+ if (not line.startswith("#")) and len(line) > 1 :
+ #
+ # Enhanced for support "|" character in the string.
+ #
+ ValueList = ['', '', '', '', '']
+
+ ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
+ PtrValue = ValueRe.findall(line)
+
+ ValueUpdateFlag = False
+
+ if len(PtrValue) >= 1:
+ line = re.sub(ValueRe, '', line)
+ ValueUpdateFlag = True
+
+ TokenList = line.split('|')
+ ValueList[0:len(TokenList)] = TokenList
+
+ if ValueUpdateFlag:
+ ValueList[4] = PtrValue[0]
+ self.FileLinesList[count] = ValueList
+ # Store the line number
+ self.FileLinesList[count].append(str(count + 1))
+ elif len(line) <= 1 :
+ # Set the blank line to "None"
+ self.FileLinesList[count] = None
+ else :
+ # Set the comment line to "None"
+ self.FileLinesList[count] = None
+ count += 1
+
+        # Reset the counter used to walk the list
+        count = 0
+        # Delete the useless lines that were set to None
+ while (True) :
+ try :
+ if (self.FileLinesList[count] is None) :
+ del(self.FileLinesList[count])
+ else :
+ count += 1
+ except :
+ break
+        #
+        # After removing the useless lines, report a warning to the user if no
+        # data remains in the file line list.
+        #
+        if len(self.FileLinesList) == 0 :
+            EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
+                           "There are no VPD type PCDs defined in the DSC file, please check it.")
+
+        # Process the PCDs one by one based on each PCD's value and size
+ count = 0
+ for line in self.FileLinesList:
+ if line is not None :
+ PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4], line[5], self.InputFileName)
+ # Strip the space char
+ PCD.PcdCName = PCD.PcdCName.strip(' ')
+ PCD.SkuId = PCD.SkuId.strip(' ')
+ PCD.PcdOffset = PCD.PcdOffset.strip(' ')
+ PCD.PcdSize = PCD.PcdSize.strip(' ')
+ PCD.PcdValue = PCD.PcdValue.strip(' ')
+ PCD.Lineno = PCD.Lineno.strip(' ')
+
+ #
+ # Store the original pcd value.
+ # This information will be useful while generate the output map file.
+ #
+ PCD.PcdUnpackValue = str(PCD.PcdValue)
+
+                # Translate the PCD size string to an integer value.
+                #
+                PackSize = None
+ PackSize = None
+ try:
+ PackSize = int(PCD.PcdSize, 10)
+ PCD.PcdBinSize = PackSize
+ except:
+ try:
+ PackSize = int(PCD.PcdSize, 16)
+ PCD.PcdBinSize = PackSize
+ except:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))
+
+ #
+ # If value is Unicode string (e.g. L""), then use 2-byte alignment
+ # If value is byte array (e.g. {}), then use 8-byte alignment
+ #
+ PCD.PcdOccupySize = PCD.PcdBinSize
+ if PCD.PcdUnpackValue.startswith("{"):
+ Alignment = 8
+ elif PCD.PcdUnpackValue.startswith("L"):
+ Alignment = 2
+ else:
+ Alignment = 1
+
+ PCD.Alignment = Alignment
+ if PCD.PcdOffset != TAB_STAR:
+ if PCD.PcdOccupySize % Alignment != 0:
+                        if PCD.PcdUnpackValue.startswith("{"):
+                            EdkLogger.warn("BPDG", "The size of PCD %s is not 8-byte aligned!" % (PCD.PcdCName), File=self.InputFileName)
+                        else:
+                            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, 'The size of PCD %s should be %s-byte aligned.' % (PCD.PcdCName, Alignment))
+ else:
+ if PCD.PcdOccupySize % Alignment != 0:
+ PCD.PcdOccupySize = (PCD.PcdOccupySize // Alignment + 1) * Alignment
+
+ PackSize = PCD.PcdOccupySize
+ if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
+ PCD._PackBooleanValue(PCD.PcdValue)
+ self.FileLinesList[count] = PCD
+ count += 1
+ continue
+ #
+ # Try to translate value to an integer firstly.
+ #
+ IsInteger = True
+ PackValue = None
+ try:
+ PackValue = int(PCD.PcdValue)
+ except:
+ try:
+ PackValue = int(PCD.PcdValue, 16)
+ except:
+ IsInteger = False
+
+ if IsInteger:
+ PCD._PackIntValue(PackValue, PackSize)
+ else:
+ PCD._PackPtrValue(PCD.PcdValue, PackSize)
+
+ self.FileLinesList[count] = PCD
+ count += 1
+ else :
+ continue
+
+ ##
+    # This function creates a clean list that contains only useful information,
+    # reorganized to make it easy to sort
+ #
+ def FormatFileLine (self) :
+
+ for eachPcd in self.FileLinesList :
+            if eachPcd.PcdOffset != TAB_STAR :
+                # PCDs with a fixed offset go into the fixed offset list
+                self.PcdFixedOffsetSizeList.append(eachPcd)
+            else :
+                # PCDs whose offset is still unknown (TAB_STAR) go into the unknown offset list
+                self.PcdUnknownOffsetList.append(eachPcd)
+
+
+ ##
+    # This function fixes offset values that are not specified in the input file.
+    # Such entries use the star character (meaning any offset) in the offset field.
+ #
+ def FixVpdOffset (self):
+        # The offset should start at 0.
+        # Sort the fixed offset list to find free space into which PCDs whose
+        # offset value is TAB_STAR can be inserted.
+
+ self.PcdFixedOffsetSizeList.sort(key=lambda x: x.PcdBinOffset)
+
+ #
+ # Sort the un-fixed pcd's offset by its size.
+ #
+ self.PcdUnknownOffsetList.sort(key=lambda x: x.PcdBinSize)
+
+        index = 0
+ for pcd in self.PcdUnknownOffsetList:
+ index += 1
+ if pcd.PcdCName == ".".join(("gEfiMdeModulePkgTokenSpaceGuid", "PcdNvStoreDefaultValueBuffer")):
+ if index != len(self.PcdUnknownOffsetList):
+ for i in range(len(self.PcdUnknownOffsetList) - index):
+ self.PcdUnknownOffsetList[index+i -1 ], self.PcdUnknownOffsetList[index+i] = self.PcdUnknownOffsetList[index+i], self.PcdUnknownOffsetList[index+i -1]
+
+ #
+ # Process all Offset value are TAB_STAR
+ #
+ if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) :
+ # The offset start from 0
+ NowOffset = 0
+ for Pcd in self.PcdUnknownOffsetList :
+ if NowOffset % Pcd.Alignment != 0:
+ NowOffset = (NowOffset// Pcd.Alignment + 1) * Pcd.Alignment
+ Pcd.PcdBinOffset = NowOffset
+ Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
+ NowOffset += Pcd.PcdOccupySize
+
+ self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
+ return
+
+        # Check that the offsets of VPD type PCDs start from 0.
+        if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
+            EdkLogger.warn("BPDG", "The offset of VPD type PCDs should start at 0, please check it.",
+ None)
+
+        # Check whether offsets in the fixed PCD offset list overlap.
+ lenOfList = len(self.PcdFixedOffsetSizeList)
+ count = 0
+ while (count < lenOfList - 1) :
+ PcdNow = self.PcdFixedOffsetSizeList[count]
+ PcdNext = self.PcdFixedOffsetSizeList[count+1]
+            # Two PCDs have the same offset
+            if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
+                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
+                                "The offset of %s at line: %s is the same as %s at line: %s in file %s" % \
+ (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
+ None)
+
+ # Overlapped
+ if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize > PcdNext.PcdBinOffset :
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
+ "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
+ (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
+ None)
+
+            # There is free space; raise a warning message
+            if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize < PcdNext.PcdBinOffset :
+                EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
+                               "There is free space between %s at line: %s and %s at line: %s in file %s" % \
+ (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
+ None)
+ count += 1
+
+ LastOffset = self.PcdFixedOffsetSizeList[0].PcdBinOffset
+ FixOffsetSizeListCount = 0
+ lenOfList = len(self.PcdFixedOffsetSizeList)
+ lenOfUnfixedList = len(self.PcdUnknownOffsetList)
+
+ ##
+        # Insert PCDs from the un-fixed offset list into the fixed offset list when there is free space between those PCDs.
+ #
+ while (FixOffsetSizeListCount < lenOfList) :
+
+ eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
+ NowOffset = eachFixedPcd.PcdBinOffset
+
+ # Has free space
+ if LastOffset < NowOffset :
+ if lenOfUnfixedList != 0 :
+ countOfUnfixedList = 0
+ while(countOfUnfixedList < lenOfUnfixedList) :
+ eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]
+ needFixPcdSize = eachUnfixedPcd.PcdOccupySize
+ # Not been fixed
+ if eachUnfixedPcd.PcdOffset == TAB_STAR :
+ if LastOffset % eachUnfixedPcd.Alignment != 0:
+ LastOffset = (LastOffset // eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
+                        # The un-fixed PCD fits into this free space
+ if needFixPcdSize <= (NowOffset - LastOffset) :
+ # Change the offset value of un-fixed pcd
+ eachUnfixedPcd.PcdOffset = str(hex(LastOffset))
+ eachUnfixedPcd.PcdBinOffset = LastOffset
+ # Insert this pcd into fixed offset pcd list.
+ self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount, eachUnfixedPcd)
+
+ # Delete the item's offset that has been fixed and added into fixed offset list
+ self.PcdUnknownOffsetList.pop(countOfUnfixedList)
+
+ # After item added, should enlarge the length of fixed pcd offset list
+ lenOfList += 1
+ FixOffsetSizeListCount += 1
+
+ # Decrease the un-fixed pcd offset list's length
+ lenOfUnfixedList -= 1
+
+ # Modify the last offset value
+ LastOffset += needFixPcdSize
+ else :
+                        # It cannot be inserted between these two PCDs; check whether other free space can hold it.
+ LastOffset = NowOffset + self.PcdFixedOffsetSizeList[FixOffsetSizeListCount].PcdOccupySize
+ FixOffsetSizeListCount += 1
+ break
+
+                # Set FixOffsetSizeListCount = lenOfList to quit the loop
+ else :
+ FixOffsetSizeListCount = lenOfList
+
+            # No free space; this PCD connects directly to the previous one.
+ elif LastOffset == NowOffset :
+ LastOffset = NowOffset + eachFixedPcd.PcdOccupySize
+ FixOffsetSizeListCount += 1
+            # Usually this branch is not entered; if it is, the offsets overlap.
+ else :
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
+ "The offset value definition has overlapped at pcd: %s, its offset is: %s, in file: %s line: %s" % \
+ (eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
+ None)
+ FixOffsetSizeListCount += 1
+
+        # Continue processing the un-fixed offset PCD list; at this point, simply append the remaining PCDs after the fixed offset PCD list.
+ lenOfUnfixedList = len(self.PcdUnknownOffsetList)
+ lenOfList = len(self.PcdFixedOffsetSizeList)
+ while (lenOfUnfixedList > 0) :
+            # There are still items to process
+            # The last PCD instance
+ LastPcd = self.PcdFixedOffsetSizeList[lenOfList-1]
+ NeedFixPcd = self.PcdUnknownOffsetList[0]
+
+ NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
+ if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
+ NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset // NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
+
+ NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
+
+ # Insert this pcd into fixed offset pcd list's tail.
+ self.PcdFixedOffsetSizeList.insert(lenOfList, NeedFixPcd)
+ # Delete the item's offset that has been fixed and added into fixed offset list
+ self.PcdUnknownOffsetList.pop(0)
+
+ lenOfList += 1
+ lenOfUnfixedList -= 1
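+
+    # Editor's sketch (illustrative only): the fixup above is essentially
+    # first-fit allocation. Given fixed (offset, size) extents and a list of
+    # sizes whose offsets are unknown, place each unknown size into the first
+    # gap it fits, otherwise append it at the end. Alignment handling is
+    # deliberately omitted from this simplified sketch.
+    @staticmethod
+    def _SketchFirstFit(FixedExtents, UnknownSizes):
+        Placed = []
+        Cursor = 0
+        Pending = list(UnknownSizes)
+        for Offset, Size in sorted(FixedExtents):
+            # Fill the gap [Cursor, Offset) with pending sizes that fit
+            while Pending and Cursor + Pending[0] <= Offset:
+                Placed.append((Cursor, Pending[0]))
+                Cursor += Pending.pop(0)
+            Cursor = Offset + Size
+        for Size in Pending:
+            # No gap fits; append at the tail
+            Placed.append((Cursor, Size))
+            Cursor += Size
+        return Placed
+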
+ ##
+ # Write the final data into output files.
+ #
+ def GenerateVpdFile (self, MapFileName, BinFileName):
+        # Open a VPD file to process
+
+ try:
+ fVpdFile = open(BinFileName, "wb")
+ except:
+ # Open failed
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
+
+ try :
+ fMapFile = open(MapFileName, "w")
+ except:
+ # Open failed
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
+
+        # Use an instance of BytesIO to cache data
+ fStringIO = BytesIO()
+
+ # Write the header of map file.
+ try :
+ fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
+ except:
+            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Failed to write data to file %s; please check whether the file is locked or in use by another application." % self.MapFileName, None)
+
+ for eachPcd in self.PcdFixedOffsetSizeList :
+ # write map file
+ try :
+ fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset, eachPcd.PcdSize, eachPcd.PcdUnpackValue))
+ except:
+                EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Failed to write data to file %s; please check whether the file is locked or in use by another application." % self.MapFileName, None)
+
+ # Write Vpd binary file
+ fStringIO.seek (eachPcd.PcdBinOffset)
+ if isinstance(eachPcd.PcdValue, list):
+ for i in range(len(eachPcd.PcdValue)):
+ Value = eachPcd.PcdValue[i:i + 1]
+                    # Python 2/3 compatibility: write a single byte in either case
+                    if isinstance(bytes(Value), str):
+ fStringIO.write(chr(Value[0]))
+ else:
+ fStringIO.write(bytes(Value))
+ else:
+ fStringIO.write (eachPcd.PcdValue)
+
+ try :
+ fVpdFile.write (fStringIO.getvalue())
+ except:
+            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Failed to write data to file %s; please check whether the file is locked or in use by another application." % self.VpdFileName, None)
+
+ fStringIO.close ()
+ fVpdFile.close ()
+ fMapFile.close ()
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/StringTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/StringTable.py
new file mode 100644
index 00000000..e1622eca
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/StringTable.py
@@ -0,0 +1,72 @@
+## @file
+# This file is used to define strings used in the BPDG tool
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+
+
+#string table starts here...
+
+#strings are classified into the following types
+# MSG_...: it is a message string
+# ERR_...: it is an error string
+# WRN_...: it is a warning string
+# LBL_...: it is a UI label (window title, control label, etc.)
+# MNU_...: it is a menu item label
+# HLP_...: it is a help string
+# CFG_...: it is a config string used in a module. No need to translate it.
+# XRC_...: it is a user visible string from an xrc file
+
+MAP_FILE_COMMENT_TEMPLATE = \
+"""
+## @file
+#
+# THIS FILE IS AUTO-GENERATED BY THE BPDG TOOL; PLEASE DO NOT MODIFY IT.
+#
+# This file lists all VPD information for a platform, fixed/adjusted by the BPDG tool.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+"""
+
+
+
+LBL_BPDG_LONG_UNI = (u"Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)")
+LBL_BPDG_VERSION = (u"1.0")
+LBL_BPDG_USAGE = \
+(
+"""BPDG options -o Filename.bin -m Filename.map Filename.txt
+Copyright (c) 2010 - 2018, Intel Corporation All Rights Reserved.
+
+ Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)
+
+Required Flags:
+ -o BIN_FILENAME, --vpd-filename=BIN_FILENAME
+ Specify the file name for the VPD binary file
+ -m FILENAME, --map-filename=FILENAME
+ Generate file name for consumption during the build that contains
+ the mapping of Pcd name, offset, datum size and value derived
+ from the input file and any automatic calculations.
+"""
+)
+
+MSG_OPTION_HELP = ("Show this help message and exit.")
+MSG_OPTION_DEBUG_LEVEL = ("Print DEBUG statements, where DEBUG_LEVEL is 0-9.")
+MSG_OPTION_VERBOSE = ("Print informational statements.")
+MSG_OPTION_QUIET = ("Returns the exit code and will display only error messages.")
+MSG_OPTION_VPD_FILENAME = ("Specify the file name for the VPD binary file.")
+MSG_OPTION_MAP_FILENAME = ("Generate file name for consumption during the build that contains the mapping of Pcd name, offset, datum size and value derived from the input file and any automatic calculations.")
+MSG_OPTION_FORCE = ("Will force overwriting existing output files rather than returning an error message.")
+
+ERR_INVALID_DEBUG_LEVEL = ("Invalid level for debug message. Only "
+ "'DEBUG', 'INFO', 'WARNING', 'ERROR', "
+ "'CRITICAL' are supported for debugging "
+ "messages.")
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/__init__.py
new file mode 100644
index 00000000..e47e479e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/BPDG/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'BPDG' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateCapsule.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateCapsule.py
new file mode 100755
index 00000000..f8d85bca
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateCapsule.py
@@ -0,0 +1,1051 @@
+## @file
+# Generate a capsule.
+#
+# This tool generates a UEFI Capsule around an FMP Capsule. The capsule payload
+# may be signed using signtool or OpenSSL; if it is signed, the signed content
+# includes an FMP Payload Header.
+#
+# This tool is intended to be used to generate UEFI Capsules to update the
+# system firmware or device firmware for integrated devices. In order to
+# keep the tool as simple as possible, it has the following limitations:
+#   * Vendor code bytes in a capsule are not supported.
+#
+# Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
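+# Illustrative encode invocation (file names and GUID below are hypothetical;
+# the option names come from the argument parser defined in this file):
+#
+#   python GenerateCapsule.py -e Payload.bin -o Capsule.cap \
+#       --guid 12345678-1234-1234-1234-123456789abc \
+#       --fw-version 0x00000002 --lsv 0x00000001 \
+#       --signer-private-cert Signer.pem --other-public-cert Other.pem \
+#       --trusted-public-cert Trusted.pem
+#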
+
+'''
+GenerateCapsule
+'''
+
+import sys
+import argparse
+import uuid
+import struct
+import subprocess
+import os
+import tempfile
+import shutil
+import platform
+import json
+from Common.Uefi.Capsule.UefiCapsuleHeader import UefiCapsuleHeaderClass
+from Common.Uefi.Capsule.FmpCapsuleHeader import FmpCapsuleHeaderClass
+from Common.Uefi.Capsule.FmpAuthHeader import FmpAuthHeaderClass
+from Common.Uefi.Capsule.CapsuleDependency import CapsuleDependencyClass
+from Common.Edk2.Capsule.FmpPayloadHeader import FmpPayloadHeaderClass
+
+#
+# Globals for help information
+#
+__prog__ = 'GenerateCapsule'
+__version__ = '0.9'
+__copyright__ = 'Copyright (c) 2018, Intel Corporation. All rights reserved.'
+__description__ = 'Generate a capsule.\n'
+
+def SignPayloadSignTool (Payload, ToolPath, PfxFile, Verbose = False):
+ #
+ # Create a temporary directory
+ #
+ TempDirectoryName = tempfile.mkdtemp()
+
+ #
+ # Generate temp file name for the payload contents
+ #
+ TempFileName = os.path.join (TempDirectoryName, 'Payload.bin')
+
+ #
+ # Create temporary payload file for signing
+ #
+ try:
+ with open (TempFileName, 'wb') as File:
+ File.write (Payload)
+ except:
+ shutil.rmtree (TempDirectoryName)
+ raise ValueError ('GenerateCapsule: error: can not write temporary payload file.')
+
+ #
+ # Build signtool command
+ #
+ if ToolPath is None:
+ ToolPath = ''
+ Command = ''
+ Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'signtool.exe'))
+ Command = Command + 'sign /fd sha256 /p7ce DetachedSignedData /p7co 1.2.840.113549.1.7.2 '
+ Command = Command + '/p7 {TempDir} '.format (TempDir = TempDirectoryName)
+ Command = Command + '/f {PfxFile} '.format (PfxFile = PfxFile)
+ Command = Command + TempFileName
+ if Verbose:
+ print (Command)
+
+ #
+ # Sign the input file using the specified private key
+ #
+ try:
+ Process = subprocess.Popen (Command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
+ Result = Process.communicate('')
+ except:
+ shutil.rmtree (TempDirectoryName)
+ raise ValueError ('GenerateCapsule: error: can not run signtool.')
+
+ if Process.returncode != 0:
+ shutil.rmtree (TempDirectoryName)
+ print (Result[1].decode())
+ raise ValueError ('GenerateCapsule: error: signtool failed.')
+
+ #
+ # Read the signature from the generated output file
+ #
+ try:
+ with open (TempFileName + '.p7', 'rb') as File:
+ Signature = File.read ()
+ except:
+ shutil.rmtree (TempDirectoryName)
+ raise ValueError ('GenerateCapsule: error: can not read signature file.')
+
+ shutil.rmtree (TempDirectoryName)
+ return Signature
+
+def VerifyPayloadSignTool (Payload, CertData, ToolPath, PfxFile, Verbose = False):
+ print ('signtool verify is not supported.')
+ raise ValueError ('GenerateCapsule: error: signtool verify is not supported.')
+
+def SignPayloadOpenSsl (Payload, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, Verbose = False):
+ #
+ # Build openssl command
+ #
+ if ToolPath is None:
+ ToolPath = ''
+ Command = ''
+ Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'openssl'))
+ Command = Command + 'smime -sign -binary -outform DER -md sha256 '
+ Command = Command + '-signer "{Private}" -certfile "{Public}"'.format (Private = SignerPrivateCertFile, Public = OtherPublicCertFile)
+ if Verbose:
+ print (Command)
+
+ #
+ # Sign the input file using the specified private key and capture signature from STDOUT
+ #
+ try:
+ Process = subprocess.Popen (Command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
+ Result = Process.communicate(input = Payload)
+ Signature = Result[0]
+ except:
+ raise ValueError ('GenerateCapsule: error: can not run openssl.')
+
+ if Process.returncode != 0:
+ print (Result[1].decode())
+ raise ValueError ('GenerateCapsule: error: openssl failed.')
+
+ return Signature
+
+def VerifyPayloadOpenSsl (Payload, CertData, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, Verbose = False):
+ #
+ # Create a temporary directory
+ #
+ TempDirectoryName = tempfile.mkdtemp()
+
+ #
+ # Generate temp file name for the payload contents
+ #
+ TempFileName = os.path.join (TempDirectoryName, 'Payload.bin')
+
+ #
+ # Create temporary payload file for verification
+ #
+ try:
+ with open (TempFileName, 'wb') as File:
+ File.write (Payload)
+ except:
+ shutil.rmtree (TempDirectoryName)
+ raise ValueError ('GenerateCapsule: error: can not write temporary payload file.')
+
+ #
+ # Build openssl command
+ #
+ if ToolPath is None:
+ ToolPath = ''
+ Command = ''
+ Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'openssl'))
+ Command = Command + 'smime -verify -inform DER '
+ Command = Command + '-content {Content} -CAfile "{Public}"'.format (Content = TempFileName, Public = TrustedPublicCertFile)
+ if Verbose:
+ print (Command)
+
+ #
+ # Verify signature
+ #
+ try:
+ Process = subprocess.Popen (Command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
+ Result = Process.communicate(input = CertData)
+ except:
+ shutil.rmtree (TempDirectoryName)
+ raise ValueError ('GenerateCapsule: error: can not run openssl.')
+
+ if Process.returncode != 0:
+ shutil.rmtree (TempDirectoryName)
+ print (Result[1].decode())
+ raise ValueError ('GenerateCapsule: error: openssl failed.')
+
+ shutil.rmtree (TempDirectoryName)
+ return Payload
+
+if __name__ == '__main__':
+ def convert_arg_line_to_args(arg_line):
+ for arg in arg_line.split():
+ if not arg.strip():
+ continue
+ yield arg
+
+ def ValidateUnsignedInteger (Argument):
+ try:
+ Value = int (Argument, 0)
+ except:
+ Message = '{Argument} is not a valid integer value.'.format (Argument = Argument)
+ raise argparse.ArgumentTypeError (Message)
+ if Value < 0:
+ Message = '{Argument} is a negative value.'.format (Argument = Argument)
+ raise argparse.ArgumentTypeError (Message)
+ return Value
+
+ def ValidateRegistryFormatGuid (Argument):
+ try:
+ Value = uuid.UUID (Argument)
+ except:
+ Message = '{Argument} is not a valid registry format GUID value.'.format (Argument = Argument)
+ raise argparse.ArgumentTypeError (Message)
+ return Value
+
+ def ConvertJsonValue (Config, FieldName, Convert, Required = True, Default = None, Open = False):
+ if FieldName not in Config:
+ if Required:
+ print ('GenerateCapsule: error: Payload descriptor invalid syntax. Could not find {Key} in payload descriptor.'.format(Key = FieldName))
+ sys.exit (1)
+ return Default
+ try:
+ Value = Convert (Config[FieldName])
+ except:
+ print ('GenerateCapsule: error: {Key} in payload descriptor has invalid syntax.'.format (Key = FieldName))
+ sys.exit (1)
+ if Open:
+ try:
+ Value = open (Value, "rb")
+ except:
+                print ('GenerateCapsule: error: can not open file {File}'.format (File = Value))
+ sys.exit (1)
+ return Value
+
+ def DecodeJsonFileParse (Json):
+ if 'Payloads' not in Json:
+ print ('GenerateCapsule: error "Payloads" section not found in JSON file {File}'.format (File = args.JsonFile.name))
+ sys.exit (1)
+ for Config in Json['Payloads']:
+ #
+ # Parse fields from JSON
+ #
+ PayloadFile = ConvertJsonValue (Config, 'Payload', os.path.expandvars, Required = False)
+ Guid = ConvertJsonValue (Config, 'Guid', ValidateRegistryFormatGuid, Required = False)
+ FwVersion = ConvertJsonValue (Config, 'FwVersion', ValidateUnsignedInteger, Required = False)
+ LowestSupportedVersion = ConvertJsonValue (Config, 'LowestSupportedVersion', ValidateUnsignedInteger, Required = False)
+ HardwareInstance = ConvertJsonValue (Config, 'HardwareInstance', ValidateUnsignedInteger, Required = False, Default = 0)
+ MonotonicCount = ConvertJsonValue (Config, 'MonotonicCount', ValidateUnsignedInteger, Required = False, Default = 0)
+ SignToolPfxFile = ConvertJsonValue (Config, 'SignToolPfxFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ OpenSslSignerPrivateCertFile = ConvertJsonValue (Config, 'OpenSslSignerPrivateCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ OpenSslOtherPublicCertFile = ConvertJsonValue (Config, 'OpenSslOtherPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ OpenSslTrustedPublicCertFile = ConvertJsonValue (Config, 'OpenSslTrustedPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ SigningToolPath = ConvertJsonValue (Config, 'SigningToolPath', os.path.expandvars, Required = False, Default = None)
+ UpdateImageIndex = ConvertJsonValue (Config, 'UpdateImageIndex', ValidateUnsignedInteger, Required = False, Default = 1)
+
+ PayloadDescriptorList.append (PayloadDescriptor (
+ PayloadFile,
+ Guid,
+ FwVersion,
+ LowestSupportedVersion,
+ MonotonicCount,
+ HardwareInstance,
+ UpdateImageIndex,
+ SignToolPfxFile,
+ OpenSslSignerPrivateCertFile,
+ OpenSslOtherPublicCertFile,
+ OpenSslTrustedPublicCertFile,
+ SigningToolPath
+ ))
+
+ def EncodeJsonFileParse (Json):
+ if 'EmbeddedDrivers' not in Json:
+ print ('GenerateCapsule: warning "EmbeddedDrivers" section not found in JSON file {File}'.format (File = args.JsonFile.name))
+ else:
+ for Config in Json['EmbeddedDrivers']:
+ EmbeddedDriverFile = ConvertJsonValue(Config, 'Driver', os.path.expandvars, Open = True)
+ #
+            # Read EmbeddedDriver file
+ #
+ try:
+ if args.Verbose:
+ print ('Read EmbeddedDriver file {File}'.format (File = EmbeddedDriverFile.name))
+ Driver = EmbeddedDriverFile.read()
+ except:
+ print ('GenerateCapsule: error: can not read EmbeddedDriver file {File}'.format (File = EmbeddedDriverFile.name))
+ sys.exit (1)
+ EmbeddedDriverDescriptorList.append (Driver)
+
+ if 'Payloads' not in Json:
+ print ('GenerateCapsule: error: "Payloads" section not found in JSON file {File}'.format (File = args.JsonFile.name))
+ sys.exit (1)
+ for Config in Json['Payloads']:
+ #
+ # Parse fields from JSON
+ #
+ PayloadFile = ConvertJsonValue (Config, 'Payload', os.path.expandvars, Open = True)
+ Guid = ConvertJsonValue (Config, 'Guid', ValidateRegistryFormatGuid)
+ FwVersion = ConvertJsonValue (Config, 'FwVersion', ValidateUnsignedInteger)
+ LowestSupportedVersion = ConvertJsonValue (Config, 'LowestSupportedVersion', ValidateUnsignedInteger)
+ HardwareInstance = ConvertJsonValue (Config, 'HardwareInstance', ValidateUnsignedInteger, Required = False, Default = 0)
+ UpdateImageIndex = ConvertJsonValue (Config, 'UpdateImageIndex', ValidateUnsignedInteger, Required = False, Default = 1)
+ MonotonicCount = ConvertJsonValue (Config, 'MonotonicCount', ValidateUnsignedInteger, Required = False, Default = 0)
+ SignToolPfxFile = ConvertJsonValue (Config, 'SignToolPfxFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ OpenSslSignerPrivateCertFile = ConvertJsonValue (Config, 'OpenSslSignerPrivateCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ OpenSslOtherPublicCertFile = ConvertJsonValue (Config, 'OpenSslOtherPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ OpenSslTrustedPublicCertFile = ConvertJsonValue (Config, 'OpenSslTrustedPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
+ SigningToolPath = ConvertJsonValue (Config, 'SigningToolPath', os.path.expandvars, Required = False, Default = None)
+ DepexExp = ConvertJsonValue (Config, 'Dependencies', str, Required = False, Default = None)
+
+ #
+ # Read binary input file
+ #
+ try:
+ if args.Verbose:
+ print ('Read binary input file {File}'.format (File = PayloadFile.name))
+ Payload = PayloadFile.read()
+ PayloadFile.close ()
+ except:
+ print ('GenerateCapsule: error: can not read binary input file {File}'.format (File = PayloadFile.name))
+ sys.exit (1)
+ PayloadDescriptorList.append (PayloadDescriptor (
+ Payload,
+ Guid,
+ FwVersion,
+ LowestSupportedVersion,
+ MonotonicCount,
+ HardwareInstance,
+ UpdateImageIndex,
+ SignToolPfxFile,
+ OpenSslSignerPrivateCertFile,
+ OpenSslOtherPublicCertFile,
+ OpenSslTrustedPublicCertFile,
+ SigningToolPath,
+ DepexExp
+ ))
+
+ def GenerateOutputJson (PayloadJsonDescriptorList):
+ PayloadJson = {
+ "Payloads" : [
+ {
+ "Guid": str(PayloadDescriptor.Guid).upper(),
+ "FwVersion": str(PayloadDescriptor.FwVersion),
+ "LowestSupportedVersion": str(PayloadDescriptor.LowestSupportedVersion),
+ "MonotonicCount": str(PayloadDescriptor.MonotonicCount),
+ "Payload": PayloadDescriptor.Payload,
+ "HardwareInstance": str(PayloadDescriptor.HardwareInstance),
+ "UpdateImageIndex": str(PayloadDescriptor.UpdateImageIndex),
+ "SignToolPfxFile": str(PayloadDescriptor.SignToolPfxFile),
+ "OpenSslSignerPrivateCertFile": str(PayloadDescriptor.OpenSslSignerPrivateCertFile),
+ "OpenSslOtherPublicCertFile": str(PayloadDescriptor.OpenSslOtherPublicCertFile),
+ "OpenSslTrustedPublicCertFile": str(PayloadDescriptor.OpenSslTrustedPublicCertFile),
+ "SigningToolPath": str(PayloadDescriptor.SigningToolPath),
+ "Dependencies" : str(PayloadDescriptor.DepexExp)
+ }for PayloadDescriptor in PayloadJsonDescriptorList
+ ]
+ }
+ OutputJsonFile = args.OutputFile.name + '.json'
+ if 'Payloads' in PayloadJson:
+ PayloadSection = PayloadJson ['Payloads']
+ Index = 0
+ for PayloadField in PayloadSection:
+ if PayloadJsonDescriptorList[Index].SignToolPfxFile is None:
+ del PayloadField ['SignToolPfxFile']
+ if PayloadJsonDescriptorList[Index].OpenSslSignerPrivateCertFile is None:
+ del PayloadField ['OpenSslSignerPrivateCertFile']
+ if PayloadJsonDescriptorList[Index].OpenSslOtherPublicCertFile is None:
+ del PayloadField ['OpenSslOtherPublicCertFile']
+ if PayloadJsonDescriptorList[Index].OpenSslTrustedPublicCertFile is None:
+ del PayloadField ['OpenSslTrustedPublicCertFile']
+ if PayloadJsonDescriptorList[Index].SigningToolPath is None:
+ del PayloadField ['SigningToolPath']
+ Index = Index + 1
+ Result = json.dumps (PayloadJson, indent=4, sort_keys=True, separators=(',', ': '))
+ with open (OutputJsonFile, 'w') as OutputFile:
+ OutputFile.write (Result)
+
+ def CheckArgumentConflict (args):
+ if args.Encode:
+ if args.InputFile:
+ print ('GenerateCapsule: error: Argument InputFile conflicts with Argument -j')
+ sys.exit (1)
+ if args.EmbeddedDriver:
+ print ('GenerateCapsule: error: Argument --embedded-driver conflicts with Argument -j')
+ sys.exit (1)
+ if args.Guid:
+ print ('GenerateCapsule: error: Argument --guid conflicts with Argument -j')
+ sys.exit (1)
+ if args.FwVersion:
+ print ('GenerateCapsule: error: Argument --fw-version conflicts with Argument -j')
+ sys.exit (1)
+ if args.LowestSupportedVersion:
+ print ('GenerateCapsule: error: Argument --lsv conflicts with Argument -j')
+ sys.exit (1)
+ if args.MonotonicCount:
+ print ('GenerateCapsule: error: Argument --monotonic-count conflicts with Argument -j')
+ sys.exit (1)
+ if args.HardwareInstance:
+ print ('GenerateCapsule: error: Argument --hardware-instance conflicts with Argument -j')
+ sys.exit (1)
+ if args.SignToolPfxFile:
+ print ('GenerateCapsule: error: Argument --pfx-file conflicts with Argument -j')
+ sys.exit (1)
+ if args.OpenSslSignerPrivateCertFile:
+ print ('GenerateCapsule: error: Argument --signer-private-cert conflicts with Argument -j')
+ sys.exit (1)
+ if args.OpenSslOtherPublicCertFile:
+ print ('GenerateCapsule: error: Argument --other-public-cert conflicts with Argument -j')
+ sys.exit (1)
+ if args.OpenSslTrustedPublicCertFile:
+ print ('GenerateCapsule: error: Argument --trusted-public-cert conflicts with Argument -j')
+ sys.exit (1)
+ if args.SigningToolPath:
+ print ('GenerateCapsule: error: Argument --signing-tool-path conflicts with Argument -j')
+ sys.exit (1)
+
+ class PayloadDescriptor (object):
+ def __init__(self,
+ Payload,
+ Guid,
+ FwVersion,
+ LowestSupportedVersion,
+ MonotonicCount = 0,
+ HardwareInstance = 0,
+ UpdateImageIndex = 1,
+ SignToolPfxFile = None,
+ OpenSslSignerPrivateCertFile = None,
+ OpenSslOtherPublicCertFile = None,
+ OpenSslTrustedPublicCertFile = None,
+ SigningToolPath = None,
+ DepexExp = None
+ ):
+ self.Payload = Payload
+ self.Guid = Guid
+ self.FwVersion = FwVersion
+ self.LowestSupportedVersion = LowestSupportedVersion
+ self.MonotonicCount = MonotonicCount
+ self.HardwareInstance = HardwareInstance
+ self.UpdateImageIndex = UpdateImageIndex
+ self.SignToolPfxFile = SignToolPfxFile
+ self.OpenSslSignerPrivateCertFile = OpenSslSignerPrivateCertFile
+ self.OpenSslOtherPublicCertFile = OpenSslOtherPublicCertFile
+ self.OpenSslTrustedPublicCertFile = OpenSslTrustedPublicCertFile
+ self.SigningToolPath = SigningToolPath
+ self.DepexExp = DepexExp
+
+ self.UseSignTool = self.SignToolPfxFile is not None
+ self.UseOpenSsl = (self.OpenSslSignerPrivateCertFile is not None and
+ self.OpenSslOtherPublicCertFile is not None and
+ self.OpenSslTrustedPublicCertFile is not None)
+ self.AnyOpenSsl = (self.OpenSslSignerPrivateCertFile is not None or
+ self.OpenSslOtherPublicCertFile is not None or
+ self.OpenSslTrustedPublicCertFile is not None)
+ self.UseDependency = self.DepexExp is not None
+
+ def Validate(self, args):
+ if self.UseSignTool and self.AnyOpenSsl:
+ raise argparse.ArgumentTypeError ('Providing both signtool and OpenSSL options is not supported')
+ if not self.UseSignTool and not self.UseOpenSsl and self.AnyOpenSsl:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('the following JSON fields are required for OpenSSL: OpenSslSignerPrivateCertFile, OpenSslOtherPublicCertFile, OpenSslTrustedPublicCertFile')
+ else:
+ raise argparse.ArgumentTypeError ('the following options are required for OpenSSL: --signer-private-cert, --other-public-cert, --trusted-public-cert')
+ if self.UseSignTool and platform.system() != 'Windows':
+ raise argparse.ArgumentTypeError ('Use of signtool is not supported on this operating system.')
+ if args.Encode:
+ if self.FwVersion is None or self.LowestSupportedVersion is None:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('the following JSON fields are required: FwVersion, LowestSupportedVersion')
+ else:
+ raise argparse.ArgumentTypeError ('the following options are required: --fw-version, --lsv')
+ if self.FwVersion > 0xFFFFFFFF:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('JSON field FwVersion must be an integer in range 0x0..0xffffffff')
+ else:
+ raise argparse.ArgumentTypeError ('--fw-version must be an integer in range 0x0..0xffffffff')
+ if self.LowestSupportedVersion > 0xFFFFFFFF:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('JSON field LowestSupportedVersion must be an integer in range 0x0..0xffffffff')
+ else:
+ raise argparse.ArgumentTypeError ('--lsv must be an integer in range 0x0..0xffffffff')
+
+ if args.Encode:
+ if self.Guid is None:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('the following JSON field is required: Guid')
+ else:
+ raise argparse.ArgumentTypeError ('the following option is required: --guid')
+ if self.HardwareInstance > 0xFFFFFFFFFFFFFFFF:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('JSON field HardwareInstance must be an integer in range 0x0..0xffffffffffffffff')
+ else:
+ raise argparse.ArgumentTypeError ('--hardware-instance must be an integer in range 0x0..0xffffffffffffffff')
+ if self.MonotonicCount > 0xFFFFFFFFFFFFFFFF:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('JSON field MonotonicCount must be an integer in range 0x0..0xffffffffffffffff')
+ else:
+ raise argparse.ArgumentTypeError ('--monotonic-count must be an integer in range 0x0..0xffffffffffffffff')
+            if self.UpdateImageIndex > 0xFF:
+ if args.JsonFile:
+ raise argparse.ArgumentTypeError ('JSON field UpdateImageIndex must be an integer in range 0x0..0xff')
+ else:
+ raise argparse.ArgumentTypeError ('--update-image-index must be an integer in range 0x0..0xff')
+
+ if self.UseSignTool:
+ self.SignToolPfxFile.close()
+ self.SignToolPfxFile = self.SignToolPfxFile.name
+ if self.UseOpenSsl:
+ self.OpenSslSignerPrivateCertFile.close()
+ self.OpenSslOtherPublicCertFile.close()
+ self.OpenSslTrustedPublicCertFile.close()
+ self.OpenSslSignerPrivateCertFile = self.OpenSslSignerPrivateCertFile.name
+ self.OpenSslOtherPublicCertFile = self.OpenSslOtherPublicCertFile.name
+ self.OpenSslTrustedPublicCertFile = self.OpenSslTrustedPublicCertFile.name
+
+ #
+ # Perform additional argument verification
+ #
+ if args.Encode:
+ if 'PersistAcrossReset' not in args.CapsuleFlag:
+ if 'InitiateReset' in args.CapsuleFlag:
+ raise argparse.ArgumentTypeError ('--capflag InitiateReset also requires --capflag PersistAcrossReset')
+ if args.CapsuleOemFlag > 0xFFFF:
+ raise argparse.ArgumentTypeError ('--capoemflag must be an integer between 0x0000 and 0xffff')
+
+ return True
+
+
+ def Encode (PayloadDescriptorList, EmbeddedDriverDescriptorList, Buffer):
+ if args.JsonFile:
+ CheckArgumentConflict(args)
+ try:
+ Json = json.loads (args.JsonFile.read ())
+ except:
+                print ('GenerateCapsule: error: can not load JSON file {JSONFile}.'.format (JSONFile = args.JsonFile.name))
+ sys.exit (1)
+ EncodeJsonFileParse(Json)
+ else:
+ for Driver in args.EmbeddedDriver:
+ EmbeddedDriverDescriptorList.append (Driver.read())
+ PayloadDescriptorList.append (PayloadDescriptor (
+ Buffer,
+ args.Guid,
+ args.FwVersion,
+ args.LowestSupportedVersion,
+ args.MonotonicCount,
+ args.HardwareInstance,
+ args.UpdateImageIndex,
+ args.SignToolPfxFile,
+ args.OpenSslSignerPrivateCertFile,
+ args.OpenSslOtherPublicCertFile,
+ args.OpenSslTrustedPublicCertFile,
+ args.SigningToolPath,
+ None
+ ))
+ for SinglePayloadDescriptor in PayloadDescriptorList:
+ try:
+ SinglePayloadDescriptor.Validate (args)
+ except Exception as Msg:
+ print ('GenerateCapsule: error:' + str(Msg))
+ sys.exit (1)
+ for SinglePayloadDescriptor in PayloadDescriptorList:
+ ImageCapsuleSupport = 0x0000000000000000
+ Result = SinglePayloadDescriptor.Payload
+ try:
+ FmpPayloadHeader.FwVersion = SinglePayloadDescriptor.FwVersion
+ FmpPayloadHeader.LowestSupportedVersion = SinglePayloadDescriptor.LowestSupportedVersion
+ FmpPayloadHeader.Payload = SinglePayloadDescriptor.Payload
+ Result = FmpPayloadHeader.Encode ()
+ if args.Verbose:
+ FmpPayloadHeader.DumpInfo ()
+ except:
+ print ('GenerateCapsule: error: can not encode FMP Payload Header')
+ sys.exit (1)
+ if SinglePayloadDescriptor.UseDependency:
+ CapsuleDependency.Payload = Result
+ CapsuleDependency.DepexExp = SinglePayloadDescriptor.DepexExp
+ ImageCapsuleSupport |= FmpCapsuleHeader.CAPSULE_SUPPORT_DEPENDENCY
+ Result = CapsuleDependency.Encode ()
+ if args.Verbose:
+ CapsuleDependency.DumpInfo ()
+ if SinglePayloadDescriptor.UseOpenSsl or SinglePayloadDescriptor.UseSignTool:
+ #
+ # Sign image with 64-bit MonotonicCount appended to end of image
+ #
+ try:
+ if SinglePayloadDescriptor.UseSignTool:
+ CertData = SignPayloadSignTool (
+ Result + struct.pack ('<Q', SinglePayloadDescriptor.MonotonicCount),
+ SinglePayloadDescriptor.SigningToolPath,
+ SinglePayloadDescriptor.SignToolPfxFile,
+ Verbose = args.Verbose
+ )
+ else:
+ CertData = SignPayloadOpenSsl (
+ Result + struct.pack ('<Q', SinglePayloadDescriptor.MonotonicCount),
+ SinglePayloadDescriptor.SigningToolPath,
+ SinglePayloadDescriptor.OpenSslSignerPrivateCertFile,
+ SinglePayloadDescriptor.OpenSslOtherPublicCertFile,
+ SinglePayloadDescriptor.OpenSslTrustedPublicCertFile,
+ Verbose = args.Verbose
+ )
+ except Exception as Msg:
+ print ('GenerateCapsule: error: can not sign payload \n' + str(Msg))
+ sys.exit (1)
+
+ try:
+ FmpAuthHeader.MonotonicCount = SinglePayloadDescriptor.MonotonicCount
+ FmpAuthHeader.CertData = CertData
+ FmpAuthHeader.Payload = Result
+ ImageCapsuleSupport |= FmpCapsuleHeader.CAPSULE_SUPPORT_AUTHENTICATION
+ Result = FmpAuthHeader.Encode ()
+ if args.Verbose:
+ FmpAuthHeader.DumpInfo ()
+ except:
+ print ('GenerateCapsule: error: can not encode FMP Auth Header')
+ sys.exit (1)
+ FmpCapsuleHeader.AddPayload (SinglePayloadDescriptor.Guid, Result, HardwareInstance = SinglePayloadDescriptor.HardwareInstance, UpdateImageIndex = SinglePayloadDescriptor.UpdateImageIndex, CapsuleSupport = ImageCapsuleSupport)
+ try:
+ for EmbeddedDriver in EmbeddedDriverDescriptorList:
+ FmpCapsuleHeader.AddEmbeddedDriver(EmbeddedDriver)
+
+ Result = FmpCapsuleHeader.Encode ()
+ if args.Verbose:
+ FmpCapsuleHeader.DumpInfo ()
+ except:
+ print ('GenerateCapsule: error: can not encode FMP Capsule Header')
+ sys.exit (1)
+
+ try:
+ UefiCapsuleHeader.OemFlags = args.CapsuleOemFlag
+ UefiCapsuleHeader.PersistAcrossReset = 'PersistAcrossReset' in args.CapsuleFlag
+ UefiCapsuleHeader.PopulateSystemTable = False
+ UefiCapsuleHeader.InitiateReset = 'InitiateReset' in args.CapsuleFlag
+ UefiCapsuleHeader.Payload = Result
+ Result = UefiCapsuleHeader.Encode ()
+ if args.Verbose:
+ UefiCapsuleHeader.DumpInfo ()
+ except:
+ print ('GenerateCapsule: error: can not encode UEFI Capsule Header')
+ sys.exit (1)
+ try:
+ if args.Verbose:
+ print ('Write binary output file {File}'.format (File = args.OutputFile.name))
+ args.OutputFile.write (Result)
+ args.OutputFile.close ()
+ except:
+ print ('GenerateCapsule: error: can not write binary output file {File}'.format (File = args.OutputFile.name))
+ sys.exit (1)
+
+ def Decode (PayloadDescriptorList, PayloadJsonDescriptorList, Buffer):
+ if args.JsonFile:
+ CheckArgumentConflict(args)
+ #
+ # Parse payload descriptors from JSON
+ #
+ try:
+ Json = json.loads (args.JsonFile.read())
+ except:
+                print ('GenerateCapsule: error: can not load JSON file {JSONFile}.'.format (JSONFile = args.JsonFile.name))
+ sys.exit (1)
+ DecodeJsonFileParse (Json)
+ else:
+ PayloadDescriptorList.append (PayloadDescriptor (
+ Buffer,
+ args.Guid,
+ args.FwVersion,
+ args.LowestSupportedVersion,
+ args.MonotonicCount,
+ args.HardwareInstance,
+ args.UpdateImageIndex,
+ args.SignToolPfxFile,
+ args.OpenSslSignerPrivateCertFile,
+ args.OpenSslOtherPublicCertFile,
+ args.OpenSslTrustedPublicCertFile,
+ args.SigningToolPath,
+ None
+ ))
+ #
+ # Perform additional verification on payload descriptors
+ #
+ for SinglePayloadDescriptor in PayloadDescriptorList:
+ try:
+ SinglePayloadDescriptor.Validate (args)
+ except Exception as Msg:
+ print ('GenerateCapsule: error:' + str(Msg))
+ sys.exit (1)
+ try:
+ Result = UefiCapsuleHeader.Decode (Buffer)
+ if len (Result) > 0:
+ Result = FmpCapsuleHeader.Decode (Result)
+ if args.JsonFile:
+ if FmpCapsuleHeader.PayloadItemCount != len (PayloadDescriptorList):
+ CapsulePayloadNum = FmpCapsuleHeader.PayloadItemCount
+ JsonPayloadNum = len (PayloadDescriptorList)
+                    print ('GenerateCapsule: Decode error: {JsonPayloadNumber} payloads in JSON file {File} do not match {CapsulePayloadNumber} payloads in Capsule {CapsuleName}'.format (JsonPayloadNumber = JsonPayloadNum, File = args.JsonFile.name, CapsulePayloadNumber = CapsulePayloadNum, CapsuleName = args.InputFile.name))
+ sys.exit (1)
+ for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
+ if Index < len (PayloadDescriptorList):
+ GUID = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageTypeId
+ HardwareInstance = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateHardwareInstance
+ UpdateImageIndex = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageIndex
+ if PayloadDescriptorList[Index].Guid != GUID or PayloadDescriptorList[Index].HardwareInstance != HardwareInstance:
+ print ('GenerateCapsule: Decode error: Guid or HardwareInstance pair in input JSON file {File} does not match the payload {PayloadIndex} in Capsule {InputCapsule}'.format (File = args.JsonFile.name, PayloadIndex = Index + 1, InputCapsule = args.InputFile.name))
+ sys.exit (1)
+ PayloadDescriptorList[Index].Payload = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
+ DecodeJsonOutput = args.OutputFile.name + '.Payload.{Index:d}.bin'.format (Index = Index + 1)
+ PayloadJsonDescriptorList.append (PayloadDescriptor (
+ DecodeJsonOutput,
+ GUID,
+ None,
+ None,
+ None,
+ HardwareInstance,
+ UpdateImageIndex,
+ PayloadDescriptorList[Index].SignToolPfxFile,
+ PayloadDescriptorList[Index].OpenSslSignerPrivateCertFile,
+ PayloadDescriptorList[Index].OpenSslOtherPublicCertFile,
+ PayloadDescriptorList[Index].OpenSslTrustedPublicCertFile,
+ PayloadDescriptorList[Index].SigningToolPath,
+ None
+ ))
+ else:
+ PayloadDescriptorList[0].Payload = FmpCapsuleHeader.GetFmpCapsuleImageHeader (0).Payload
+ for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
+ if Index > 0:
+ PayloadDecodeFile = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
+ PayloadDescriptorList.append (PayloadDescriptor (PayloadDecodeFile,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None
+ ))
+ GUID = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageTypeId
+ HardwareInstance = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateHardwareInstance
+ UpdateImageIndex = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageIndex
+ DecodeJsonOutput = args.OutputFile.name + '.Payload.{Index:d}.bin'.format (Index = Index + 1)
+ PayloadJsonDescriptorList.append (PayloadDescriptor (
+ DecodeJsonOutput,
+ GUID,
+ None,
+ None,
+ None,
+ HardwareInstance,
+ UpdateImageIndex,
+ PayloadDescriptorList[Index].SignToolPfxFile,
+ PayloadDescriptorList[Index].OpenSslSignerPrivateCertFile,
+ PayloadDescriptorList[Index].OpenSslOtherPublicCertFile,
+ PayloadDescriptorList[Index].OpenSslTrustedPublicCertFile,
+ PayloadDescriptorList[Index].SigningToolPath,
+ None
+ ))
+ JsonIndex = 0
+ for SinglePayloadDescriptor in PayloadDescriptorList:
+ if args.Verbose:
+ print ('========')
+ UefiCapsuleHeader.DumpInfo ()
+ print ('--------')
+ FmpCapsuleHeader.DumpInfo ()
+ if FmpAuthHeader.IsSigned(SinglePayloadDescriptor.Payload):
+ if not SinglePayloadDescriptor.UseOpenSsl and not SinglePayloadDescriptor.UseSignTool:
+                    print ('GenerateCapsule: decode warning: can not verify signed payload without a cert or pfx file. Index = {Index}'.format (Index = JsonIndex + 1))
+ SinglePayloadDescriptor.Payload = FmpAuthHeader.Decode (SinglePayloadDescriptor.Payload)
+ PayloadJsonDescriptorList[JsonIndex].MonotonicCount = FmpAuthHeader.MonotonicCount
+ if args.Verbose:
+ print ('--------')
+ FmpAuthHeader.DumpInfo ()
+
+ #
+ # Verify Image with 64-bit MonotonicCount appended to end of image
+ #
+ try:
+ if SinglePayloadDescriptor.UseSignTool:
+ CertData = VerifyPayloadSignTool (
+ FmpAuthHeader.Payload + struct.pack ('<Q', FmpAuthHeader.MonotonicCount),
+ FmpAuthHeader.CertData,
+ SinglePayloadDescriptor.SigningToolPath,
+ SinglePayloadDescriptor.SignToolPfxFile,
+ Verbose = args.Verbose
+ )
+ else:
+ CertData = VerifyPayloadOpenSsl (
+ FmpAuthHeader.Payload + struct.pack ('<Q', FmpAuthHeader.MonotonicCount),
+ FmpAuthHeader.CertData,
+ SinglePayloadDescriptor.SigningToolPath,
+ SinglePayloadDescriptor.OpenSslSignerPrivateCertFile,
+ SinglePayloadDescriptor.OpenSslOtherPublicCertFile,
+ SinglePayloadDescriptor.OpenSslTrustedPublicCertFile,
+ Verbose = args.Verbose
+ )
+ except Exception as Msg:
+ print ('GenerateCapsule: warning: payload verification failed Index = {Index} \n'.format (Index = JsonIndex + 1) + str(Msg))
+ else:
+ if args.Verbose:
+ print ('--------')
+ print ('No EFI_FIRMWARE_IMAGE_AUTHENTICATION')
+
+            PayloadSignature = struct.unpack ('<I', SinglePayloadDescriptor.Payload[0:4])[0]  # unpack returns a tuple; compare the scalar value
+ if PayloadSignature != FmpPayloadHeader.Signature:
+ SinglePayloadDescriptor.UseDependency = True
+ try:
+ SinglePayloadDescriptor.Payload = CapsuleDependency.Decode (SinglePayloadDescriptor.Payload)
+ PayloadJsonDescriptorList[JsonIndex].DepexExp = CapsuleDependency.DepexExp
+ if args.Verbose:
+ print ('--------')
+ CapsuleDependency.DumpInfo ()
+ except Exception as Msg:
+ print ('GenerateCapsule: error: invalid dependency expression')
+ else:
+ if args.Verbose:
+ print ('--------')
+ print ('No EFI_FIRMWARE_IMAGE_DEP')
+
+ try:
+ SinglePayloadDescriptor.Payload = FmpPayloadHeader.Decode (SinglePayloadDescriptor.Payload)
+ PayloadJsonDescriptorList[JsonIndex].FwVersion = FmpPayloadHeader.FwVersion
+ PayloadJsonDescriptorList[JsonIndex].LowestSupportedVersion = FmpPayloadHeader.LowestSupportedVersion
+ JsonIndex = JsonIndex + 1
+ if args.Verbose:
+ print ('--------')
+ FmpPayloadHeader.DumpInfo ()
+ print ('========')
+ except:
+ if args.Verbose:
+ print ('--------')
+ print ('No FMP_PAYLOAD_HEADER')
+ print ('========')
+ sys.exit (1)
+ #
+ # Write embedded driver file(s)
+ #
+ for Index in range (0, FmpCapsuleHeader.EmbeddedDriverCount):
+ EmbeddedDriverBuffer = FmpCapsuleHeader.GetEmbeddedDriver (Index)
+ EmbeddedDriverPath = args.OutputFile.name + '.EmbeddedDriver.{Index:d}.efi'.format (Index = Index + 1)
+ try:
+ if args.Verbose:
+ print ('Write embedded driver file {File}'.format (File = EmbeddedDriverPath))
+ with open (EmbeddedDriverPath, 'wb') as EmbeddedDriverFile:
+ EmbeddedDriverFile.write (EmbeddedDriverBuffer)
+ except:
+ print ('GenerateCapsule: error: can not write embedded driver file {File}'.format (File = EmbeddedDriverPath))
+ sys.exit (1)
+
+ except:
+ print ('GenerateCapsule: error: can not decode capsule')
+ sys.exit (1)
+ GenerateOutputJson(PayloadJsonDescriptorList)
+ PayloadIndex = 0
+ for SinglePayloadDescriptor in PayloadDescriptorList:
+ if args.OutputFile is None:
+ print ('GenerateCapsule: Decode error: OutputFile is needed for decode output')
+ sys.exit (1)
+ try:
+ if args.Verbose:
+ print ('Write binary output file {File}'.format (File = args.OutputFile.name))
+ PayloadDecodePath = args.OutputFile.name + '.Payload.{Index:d}.bin'.format (Index = PayloadIndex + 1)
+ with open (PayloadDecodePath, 'wb') as PayloadDecodeFile:
+ PayloadDecodeFile.write (SinglePayloadDescriptor.Payload)
+ PayloadIndex = PayloadIndex + 1
+ except:
+                print ('GenerateCapsule: error: can not write binary output file {File}'.format (File = args.OutputFile.name))
+ sys.exit (1)
+
+ def DumpInfo (Buffer, args):
+ if args.OutputFile is not None:
+ raise argparse.ArgumentTypeError ('the following option is not supported for dumpinfo operations: --output')
+ try:
+ Result = UefiCapsuleHeader.Decode (Buffer)
+ print ('========')
+ UefiCapsuleHeader.DumpInfo ()
+ if len (Result) > 0:
+ FmpCapsuleHeader.Decode (Result)
+ print ('--------')
+ FmpCapsuleHeader.DumpInfo ()
+ for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
+ Result = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
+ try:
+ Result = FmpAuthHeader.Decode (Result)
+ print ('--------')
+ FmpAuthHeader.DumpInfo ()
+ except:
+ print ('--------')
+ print ('No EFI_FIRMWARE_IMAGE_AUTHENTICATION')
+
+                PayloadSignature = struct.unpack ('<I', Result[0:4])[0]  # unpack returns a tuple; compare the scalar value
+ if PayloadSignature != FmpPayloadHeader.Signature:
+ try:
+ Result = CapsuleDependency.Decode (Result)
+ print ('--------')
+ CapsuleDependency.DumpInfo ()
+ except:
+ print ('GenerateCapsule: error: invalid dependency expression')
+ else:
+ print ('--------')
+ print ('No EFI_FIRMWARE_IMAGE_DEP')
+ try:
+ Result = FmpPayloadHeader.Decode (Result)
+ print ('--------')
+ FmpPayloadHeader.DumpInfo ()
+ except:
+ print ('--------')
+ print ('No FMP_PAYLOAD_HEADER')
+ print ('========')
+ except:
+ print ('GenerateCapsule: error: can not decode capsule')
+ sys.exit (1)
+ #
+ # Create command line argument parser object
+ #
+ parser = argparse.ArgumentParser (
+ prog = __prog__,
+ description = __description__ + __copyright__,
+ conflict_handler = 'resolve',
+ fromfile_prefix_chars = '@'
+ )
+ parser.convert_arg_line_to_args = convert_arg_line_to_args
+
+ #
+ # Add input and output file arguments
+ #
+ parser.add_argument("InputFile", type = argparse.FileType('rb'), nargs='?',
+ help = "Input binary payload filename.")
+ parser.add_argument("-o", "--output", dest = 'OutputFile', type = argparse.FileType('wb'),
+ help = "Output filename.")
+ #
+ # Add group for -e and -d flags that are mutually exclusive and required
+ #
+ group = parser.add_mutually_exclusive_group (required = True)
+ group.add_argument ("-e", "--encode", dest = 'Encode', action = "store_true",
+ help = "Encode file")
+ group.add_argument ("-d", "--decode", dest = 'Decode', action = "store_true",
+ help = "Decode file")
+ group.add_argument ("--dump-info", dest = 'DumpInfo', action = "store_true",
+ help = "Display FMP Payload Header information")
+ #
+ # Add optional arguments for this command
+ #
+ parser.add_argument ("-j", "--json-file", dest = 'JsonFile', type=argparse.FileType('r'),
+ help = "JSON configuration file for multiple payloads and embedded drivers.")
+ parser.add_argument ("--capflag", dest = 'CapsuleFlag', action='append', default = [],
+ choices=['PersistAcrossReset', 'InitiateReset'],
+ help = "Capsule flag can be PersistAcrossReset or InitiateReset or not set")
+ parser.add_argument ("--capoemflag", dest = 'CapsuleOemFlag', type = ValidateUnsignedInteger, default = 0x0000,
+ help = "Capsule OEM Flag is an integer between 0x0000 and 0xffff.")
+
+ parser.add_argument ("--guid", dest = 'Guid', type = ValidateRegistryFormatGuid,
+ help = "The FMP/ESRT GUID in registry format. Required for single payload encode operations.")
+ parser.add_argument ("--hardware-instance", dest = 'HardwareInstance', type = ValidateUnsignedInteger, default = 0x0000000000000000,
+ help = "The 64-bit hardware instance. The default is 0x0000000000000000")
+
+
+ parser.add_argument ("--monotonic-count", dest = 'MonotonicCount', type = ValidateUnsignedInteger, default = 0x0000000000000000,
+ help = "64-bit monotonic count value in header. Default is 0x0000000000000000.")
+
+ parser.add_argument ("--fw-version", dest = 'FwVersion', type = ValidateUnsignedInteger,
+ help = "The 32-bit version of the binary payload (e.g. 0x11223344 or 5678). Required for encode operations.")
+ parser.add_argument ("--lsv", dest = 'LowestSupportedVersion', type = ValidateUnsignedInteger,
+ help = "The 32-bit lowest supported version of the binary payload (e.g. 0x11223344 or 5678). Required for encode operations.")
+
+ parser.add_argument ("--pfx-file", dest='SignToolPfxFile', type=argparse.FileType('rb'),
+ help="signtool PFX certificate filename.")
+
+ parser.add_argument ("--signer-private-cert", dest='OpenSslSignerPrivateCertFile', type=argparse.FileType('rb'),
+ help="OpenSSL signer private certificate filename.")
+ parser.add_argument ("--other-public-cert", dest='OpenSslOtherPublicCertFile', type=argparse.FileType('rb'),
+ help="OpenSSL other public certificate filename.")
+ parser.add_argument ("--trusted-public-cert", dest='OpenSslTrustedPublicCertFile', type=argparse.FileType('rb'),
+ help="OpenSSL trusted public certificate filename.")
+
+ parser.add_argument ("--signing-tool-path", dest = 'SigningToolPath',
+ help = "Path to signtool or OpenSSL tool. Optional if path to tools are already in PATH.")
+
+ parser.add_argument ("--embedded-driver", dest = 'EmbeddedDriver', type = argparse.FileType('rb'), action='append', default = [],
+ help = "Path to embedded UEFI driver to add to capsule.")
+
+ #
+ # Add optional arguments common to all operations
+ #
+ parser.add_argument ('--version', action='version', version='%(prog)s ' + __version__)
+ parser.add_argument ("-v", "--verbose", dest = 'Verbose', action = "store_true",
+ help = "Turn on verbose output with informational messages printed, including capsule headers and warning messages.")
+ parser.add_argument ("-q", "--quiet", dest = 'Quiet', action = "store_true",
+ help = "Disable all messages except fatal errors.")
+ parser.add_argument ("--debug", dest = 'Debug', type = int, metavar = '[0-9]', choices = range (0, 10), default = 0,
+ help = "Set debug level")
+ parser.add_argument ("--update-image-index", dest = 'UpdateImageIndex', type = ValidateUnsignedInteger, default = 0x01, help = "unique number identifying the firmware image within the device ")
+
+ #
+ # Parse command line arguments
+ #
+ args = parser.parse_args()
+
+ #
+ # Read binary input file
+ #
+    Buffer = b''
+ if args.InputFile:
+ if os.path.getsize (args.InputFile.name) == 0:
+ print ('GenerateCapsule: error: InputFile {File} is empty'.format (File = args.InputFile.name))
+ sys.exit (1)
+ try:
+ if args.Verbose:
+ print ('Read binary input file {File}'.format (File = args.InputFile.name))
+ Buffer = args.InputFile.read ()
+ args.InputFile.close ()
+ except:
+ print ('GenerateCapsule: error: can not read binary input file {File}'.format (File = args.InputFile.name))
+ sys.exit (1)
+
+ #
+ # Create objects
+ #
+ UefiCapsuleHeader = UefiCapsuleHeaderClass ()
+ FmpCapsuleHeader = FmpCapsuleHeaderClass ()
+ FmpAuthHeader = FmpAuthHeaderClass ()
+ FmpPayloadHeader = FmpPayloadHeaderClass ()
+ CapsuleDependency = CapsuleDependencyClass ()
+
+ EmbeddedDriverDescriptorList = []
+ PayloadDescriptorList = []
+ PayloadJsonDescriptorList = []
+
+ #
+    # Encode Operation
+ #
+ if args.Encode:
+ Encode (PayloadDescriptorList, EmbeddedDriverDescriptorList, Buffer)
+
+ #
+    # Decode Operation
+ #
+ if args.Decode:
+ Decode (PayloadDescriptorList, PayloadJsonDescriptorList, Buffer)
+
+ #
+    # Dump Info Operation
+ #
+ if args.DumpInfo:
+ DumpInfo (Buffer, args)
+
+ if args.Verbose:
+ print('Success')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateWindowsDriver.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateWindowsDriver.py
new file mode 100755
index 00000000..9486b0e4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/GenerateWindowsDriver.py
@@ -0,0 +1,120 @@
+## @file
+# Generate a capsule windows driver.
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
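+# Illustrative invocation (all values are hypothetical; the option names come
+# from the argument parser defined below):
+#
+#   python GenerateWindowsDriver.py --output-folder Out \
+#       --product-fmp-guid 12345678-1234-1234-1234-123456789abc \
+#       --capsuleversion-dotstring 1.0.0.1 --capsuleversion-hexstring 0x01000001 \
+#       --product-fw-provider Acme --product-fw-mfg-name Acme \
+#       --product-fw-desc "Acme firmware" --capsule-file-name Fw.cap
+#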
+
+'''
+GenerateWindowsDriver
+'''
+
+import sys
+import argparse
+import uuid
+import struct
+import subprocess
+import os
+import tempfile
+import shutil
+import platform
+import re
+import logging
+from WindowsCapsuleSupportHelper import WindowsCapsuleSupportHelper
+from Common.Uefi.Capsule.FmpCapsuleHeader import FmpCapsuleHeaderClass
+from Common.Uefi.Capsule.UefiCapsuleHeader import UefiCapsuleHeaderClass
+
+#
+# Globals for help information
+#
+__prog__ = 'GenerateWindowsDriver'
+__version__ = '0.0'
+__copyright__ = 'Copyright (c) 2019, Intel Corporation. All rights reserved.'
+__description__ = 'Generate Capsule Windows Driver.\n'
+
+def GetCapGuid (InputFile):
+ with open(InputFile, 'rb') as File:
+ Buffer = File.read()
+ try:
+ Result = UefiCapsuleHeader.Decode (Buffer)
+ if len (Result) > 0:
+ FmpCapsuleHeader.Decode (Result)
+ for index in range (0, FmpCapsuleHeader.PayloadItemCount):
+ Guid = FmpCapsuleHeader.GetFmpCapsuleImageHeader (index).UpdateImageTypeId
+ return Guid
+ except:
+        print ('GenerateWindowsDriver: error: can not decode capsule')
+ sys.exit (1)
+
+def ArgCheck(args):
+ Version = args.CapsuleVersion_DotString.split('.')
+
+ if len(Version) != 4:
+ logging.critical("Name invalid: '%s'", args.CapsuleVersion_DotString)
+ raise ValueError("Name invalid.")
+ for sub in Version:
+ if int(sub, 16) > 65536:
+ logging.critical("Name invalid: '%s'", args.CapsuleVersion_DotString)
+ raise ValueError("Name exceed limit 65536.")
+
+    # The version string may contain only hex digits and '.' separators.
+    if not (re.compile(r'[.a-fA-F0-9]*$')).match(args.CapsuleVersion_DotString):
+ logging.critical("Name invalid: '%s'", args.CapsuleVersion_DotString)
+ raise ValueError("Name has invalid chars.")
+
+def CapsuleGuidCheck(InputFile, Guid):
+ CapGuid = GetCapGuid(InputFile)
+ if (str(Guid).lower() != str(CapGuid)):
+        print('GenerateWindowsDriver: error: GUID does not match the GUID in the capsule')
+ sys.exit(1)
+
+if __name__ == '__main__':
+ def convert_arg_line_to_args(arg_line):
+ for arg in arg_line.split():
+ if not arg.strip():
+ continue
+ yield arg
+
+ parser = argparse.ArgumentParser (
+ prog = __prog__,
+ description = __description__ + __copyright__,
+ conflict_handler = 'resolve',
+ fromfile_prefix_chars = '@'
+ )
+ parser.convert_arg_line_to_args = convert_arg_line_to_args
+ parser.add_argument("--output-folder", dest = 'OutputFolder', help = "firmware resource update driver package output folder.")
+ parser.add_argument("--product-fmp-guid", dest = 'ProductFmpGuid', help = "firmware GUID of resource update driver package")
+ parser.add_argument("--capsuleversion-dotstring", dest = 'CapsuleVersion_DotString', help = "firmware version with date on which update driver package is authored")
+ parser.add_argument("--capsuleversion-hexstring", dest = 'CapsuleVersion_HexString', help = "firmware version in Hex of update driver package")
+ parser.add_argument("--product-fw-provider", dest = 'ProductFwProvider', help = "vendor/provider of entire firmware resource update driver package")
+ parser.add_argument("--product-fw-mfg-name", dest = 'ProductFwMfgName', help = "manufacturer/vendor of firmware resource update driver package")
+ parser.add_argument("--product-fw-desc", dest = "ProductFwDesc", help = "description about resource update driver")
+ parser.add_argument("--capsule-file-name", dest = 'CapsuleFileName', help ="firmware resource image file")
+ parser.add_argument("--pfx-file", dest = 'PfxFile', help = "pfx file path used to sign resource update driver")
+ parser.add_argument("--arch", dest = 'Arch', help = "supported architecture:arm/x64/amd64/arm64/aarch64", default = 'amd64')
+ parser.add_argument("--operating-system-string", dest = 'OperatingSystemString', help = "supported operating system:win10/10/10_au/10_rs2/10_rs3/10_rs4/server10/server2016/serverrs2/serverrs3/serverrs4", default = "win10")
+
+ args = parser.parse_args()
+ InputFile = os.path.join(args.OutputFolder, '') + args.CapsuleFileName
+ UefiCapsuleHeader = UefiCapsuleHeaderClass ()
+ FmpCapsuleHeader = FmpCapsuleHeaderClass ()
+ CapsuleGuidCheck(InputFile, args.ProductFmpGuid)
+ ArgCheck(args)
+ ProductName = os.path.splitext(args.CapsuleFileName)[0]
+ WindowsDriver = WindowsCapsuleSupportHelper ()
+
+ WindowsDriver.PackageWindowsCapsuleFiles (
+ args.OutputFolder,
+ ProductName,
+ args.ProductFmpGuid,
+ args.CapsuleVersion_DotString,
+ args.CapsuleVersion_HexString,
+ args.ProductFwProvider,
+ args.ProductFwMfgName,
+ args.ProductFwDesc,
+ args.CapsuleFileName,
+ args.PfxFile,
+ None,
+ None,
+ args.Arch,
+ args.OperatingSystemString
+ )
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py
new file mode 100755
index 00000000..9b049bdd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py
@@ -0,0 +1,64 @@
+##
+# UefiBuild Plugin that supports Windows Capsule files based on the
+# Windows Firmware Update Platform spec.
+# Creates the INF and CAT files, and then signs the CAT file.
+#
+# To install, run: pip install --upgrade edk2-pytool-library
+# edk2-pytool-library 0.9.1 is required.
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
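+
+# Minimal usage sketch outside the UefiBuild plugin flow (all values are
+# hypothetical; the positional parameters mirror PackageWindowsCapsuleFiles
+# below):
+#   WindowsCapsuleSupportHelper.PackageWindowsCapsuleFiles(
+#       'Out', 'AcmeFw', '12345678-1234-1234-1234-123456789abc',
+#       '1.0.0.1', '0x01000001', 'Acme', 'Acme', 'Acme firmware', 'Fw.cap')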
+
+import sys
+import re
+import datetime
+import os
+import logging
+from edk2toollib.windows.capsule.cat_generator import CatGenerator
+from edk2toollib.windows.capsule.inf_generator import InfGenerator
+from edk2toollib.utility_functions import CatalogSignWithSignTool
+from edk2toollib.windows.locate_tools import FindToolInWinSdk
+
+class WindowsCapsuleSupportHelper(object):
+
+ def RegisterHelpers(self, obj):
+ fp = os.path.abspath(__file__)
+ obj.Register("PackageWindowsCapsuleFiles", WindowsCapsuleSupportHelper.PackageWindowsCapsuleFiles, fp)
+
+
+ @staticmethod
+ def PackageWindowsCapsuleFiles(OutputFolder, ProductName, ProductFmpGuid, CapsuleVersion_DotString,
+ CapsuleVersion_HexString, ProductFwProvider, ProductFwMfgName, ProductFwDesc, CapsuleFileName, PfxFile=None, PfxPass=None,
+ Rollback=False, Arch='amd64', OperatingSystem_String='Win10'):
+
+ logging.debug("CapsulePackage: Create Windows Capsule Files")
+
+ #Make INF
+ InfFilePath = os.path.join(OutputFolder, ProductName + ".inf")
+ InfTool = InfGenerator(ProductName, ProductFwProvider, ProductFmpGuid, Arch, ProductFwDesc, CapsuleVersion_DotString, CapsuleVersion_HexString)
+ InfTool.Manufacturer = ProductFwMfgName #optional
+ ret = InfTool.MakeInf(InfFilePath, CapsuleFileName, Rollback)
+ if(ret != 0):
+ raise Exception("CreateWindowsInf Failed with errorcode %d" % ret)
+
+ #Make CAT
+ CatFilePath = os.path.realpath(os.path.join(OutputFolder, ProductName + ".cat"))
+ CatTool = CatGenerator(Arch, OperatingSystem_String)
+ ret = CatTool.MakeCat(CatFilePath)
+
+ if(ret != 0):
+ raise Exception("Creating Cat file Failed with errorcode %d" % ret)
+
+ if(PfxFile is not None):
+ #Find Signtool
+ SignToolPath = FindToolInWinSdk("signtool.exe")
+ if not os.path.exists(SignToolPath):
+ raise Exception("Can't find signtool on this machine.")
+ #dev sign the cat file
+ ret = CatalogSignWithSignTool(SignToolPath, CatFilePath, PfxFile, PfxPass)
+ if(ret != 0):
+ raise Exception("Signing Cat file Failed with errorcode %d" % ret)
+
+ return ret
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildToolError.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildToolError.py
new file mode 100644
index 00000000..c0a3269b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildToolError.py
@@ -0,0 +1,160 @@
+## @file
+# Standardized Error Handling infrastructures.
+#
+# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+FILE_OPEN_FAILURE = 1
+FILE_WRITE_FAILURE = 2
+FILE_PARSE_FAILURE = 3
+FILE_READ_FAILURE = 4
+FILE_CREATE_FAILURE = 5
+FILE_CHECKSUM_FAILURE = 6
+FILE_COMPRESS_FAILURE = 7
+FILE_DECOMPRESS_FAILURE = 8
+FILE_MOVE_FAILURE = 9
+FILE_DELETE_FAILURE = 10
+FILE_COPY_FAILURE = 11
+FILE_POSITIONING_FAILURE = 12
+FILE_ALREADY_EXIST = 13
+FILE_NOT_FOUND = 14
+FILE_TYPE_MISMATCH = 15
+FILE_CASE_MISMATCH = 16
+FILE_DUPLICATED = 17
+FILE_UNKNOWN_ERROR = 0x0FFF
+
+OPTION_UNKNOWN = 0x1000
+OPTION_MISSING = 0x1001
+OPTION_CONFLICT = 0x1002
+OPTION_VALUE_INVALID = 0x1003
+OPTION_DEPRECATED = 0x1004
+OPTION_NOT_SUPPORTED = 0x1005
+OPTION_UNKNOWN_ERROR = 0x1FFF
+
+PARAMETER_INVALID = 0x2000
+PARAMETER_MISSING = 0x2001
+PARAMETER_UNKNOWN_ERROR = 0x2FFF
+
+FORMAT_INVALID = 0x3000
+FORMAT_NOT_SUPPORTED = 0x3001
+FORMAT_UNKNOWN = 0x3002
+FORMAT_UNKNOWN_ERROR = 0x3FFF
+
+RESOURCE_NOT_AVAILABLE = 0x4000
+RESOURCE_ALLOCATE_FAILURE = 0x4001
+RESOURCE_FULL = 0x4002
+RESOURCE_OVERFLOW = 0x4003
+RESOURCE_UNDERRUN = 0x4004
+RESOURCE_UNKNOWN_ERROR = 0x4FFF
+
+ATTRIBUTE_NOT_AVAILABLE = 0x5000
+ATTRIBUTE_GET_FAILURE = 0x5001
+ATTRIBUTE_SET_FAILURE = 0x5002
+ATTRIBUTE_UPDATE_FAILURE = 0x5003
+ATTRIBUTE_ACCESS_DENIED = 0x5004
+ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF
+
+IO_NOT_READY = 0x6000
+IO_BUSY = 0x6001
+IO_TIMEOUT = 0x6002
+IO_UNKNOWN_ERROR = 0x6FFF
+
+COMMAND_FAILURE = 0x7000
+
+PERMISSION_FAILURE = 0x8000
+
+FV_FREESIZE_ERROR = 0x9000
+
+CODE_ERROR = 0xC0DE
+
+AUTOGEN_ERROR = 0xF000
+PARSER_ERROR = 0xF001
+BUILD_ERROR = 0xF002
+GENFDS_ERROR = 0xF003
+ECC_ERROR = 0xF004
+EOT_ERROR = 0xF005
+PREBUILD_ERROR = 0xF007
+POSTBUILD_ERROR = 0xF008
+DDC_ERROR = 0xF009
+WARNING_AS_ERROR = 0xF006
+MIGRATION_ERROR = 0xF010
+PCD_VALIDATION_INFO_ERROR = 0xF011
+PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012
+PCD_VARIABLE_INFO_ERROR = 0xF016
+PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013
+PCD_STRUCTURE_PCD_INVALID_FIELD_ERROR = 0xF014
+PCD_STRUCTURE_PCD_ERROR = 0xF015
+ERROR_STATEMENT = 0xFFFD
+ABORT_ERROR = 0xFFFE
+UNKNOWN_ERROR = 0xFFFF
+
+## Error message of each error code
+gErrorMessage = {
+ FILE_NOT_FOUND : "File/directory not found in workspace",
+ FILE_OPEN_FAILURE : "File open failure",
+ FILE_WRITE_FAILURE : "File write failure",
+ FILE_PARSE_FAILURE : "File parse failure",
+ FILE_READ_FAILURE : "File read failure",
+ FILE_CREATE_FAILURE : "File create failure",
+ FILE_CHECKSUM_FAILURE : "Invalid checksum of file",
+ FILE_COMPRESS_FAILURE : "File compress failure",
+ FILE_DECOMPRESS_FAILURE : "File decompress failure",
+ FILE_MOVE_FAILURE : "File move failure",
+ FILE_DELETE_FAILURE : "File delete failure",
+ FILE_COPY_FAILURE : "File copy failure",
+ FILE_POSITIONING_FAILURE: "Failed to seeking position",
+ FILE_ALREADY_EXIST : "File or directory already exists",
+ FILE_TYPE_MISMATCH : "Incorrect file type",
+ FILE_CASE_MISMATCH : "File name case mismatch",
+ FILE_DUPLICATED : "Duplicated file found",
+ FILE_UNKNOWN_ERROR : "Unknown error encountered on file",
+
+ OPTION_UNKNOWN : "Unknown option",
+ OPTION_MISSING : "Missing option",
+ OPTION_CONFLICT : "Conflict options",
+ OPTION_VALUE_INVALID : "Invalid value of option",
+ OPTION_DEPRECATED : "Deprecated option",
+ OPTION_NOT_SUPPORTED : "Unsupported option",
+ OPTION_UNKNOWN_ERROR : "Unknown error when processing options",
+
+ PARAMETER_INVALID : "Invalid parameter",
+ PARAMETER_MISSING : "Missing parameter",
+ PARAMETER_UNKNOWN_ERROR : "Unknown error in parameters",
+
+ FORMAT_INVALID : "Invalid syntax/format",
+ FORMAT_NOT_SUPPORTED : "Not supported syntax/format",
+ FORMAT_UNKNOWN : "Unknown format",
+    FORMAT_UNKNOWN_ERROR : "Unknown error in syntax/format",
+
+ RESOURCE_NOT_AVAILABLE : "Not available",
+ RESOURCE_ALLOCATE_FAILURE : "Allocate failure",
+ RESOURCE_FULL : "Full",
+ RESOURCE_OVERFLOW : "Overflow",
+ RESOURCE_UNDERRUN : "Underrun",
+ RESOURCE_UNKNOWN_ERROR : "Unknown error",
+
+ ATTRIBUTE_NOT_AVAILABLE : "Not available",
+ ATTRIBUTE_GET_FAILURE : "Failed to retrieve",
+ ATTRIBUTE_SET_FAILURE : "Failed to set",
+ ATTRIBUTE_UPDATE_FAILURE: "Failed to update",
+ ATTRIBUTE_ACCESS_DENIED : "Access denied",
+ ATTRIBUTE_UNKNOWN_ERROR : "Unknown error when accessing",
+
+ COMMAND_FAILURE : "Failed to execute command",
+
+ IO_NOT_READY : "Not ready",
+ IO_BUSY : "Busy",
+ IO_TIMEOUT : "Timeout",
+ IO_UNKNOWN_ERROR : "Unknown error in IO operation",
+
+ ERROR_STATEMENT : "!error statement",
+ UNKNOWN_ERROR : "Unknown error",
+}
+
+## Exception indicating a fatal error
+class FatalError(Exception):
+ pass
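+
+# Minimal usage sketch (illustrative): look up the message for an error code,
+# falling back to UNKNOWN_ERROR, then abort via FatalError.
+#
+#   def Abort(ErrorCode):
+#       print(gErrorMessage.get(ErrorCode, gErrorMessage[UNKNOWN_ERROR]))
+#       raise FatalError(ErrorCode)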
+
+if __name__ == "__main__":
+ pass
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildVersion.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildVersion.py
new file mode 100644
index 00000000..d0758824
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/BuildVersion.py
@@ -0,0 +1,10 @@
+## @file
+#
+# This file is for build version number auto generation
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+gBUILD_VERSION = "Developer Build based on Revision: Unknown"
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/DataType.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/DataType.py
new file mode 100644
index 00000000..1ab829f1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/DataType.py
@@ -0,0 +1,539 @@
+## @file
+# This file is used to define common static strings used by INF/DEC/DSC files
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# Portions copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
+# Portions Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Common Definitions
+#
+TAB_SPLIT = '.'
+TAB_COMMENT_EDK_START = '/*'
+TAB_COMMENT_EDK_END = '*/'
+TAB_COMMENT_EDK_SPLIT = '//'
+TAB_COMMENT_SPLIT = '#'
+TAB_SPECIAL_COMMENT = '##'
+TAB_EQUAL_SPLIT = '='
+TAB_VALUE_SPLIT = '|'
+TAB_COMMA_SPLIT = ','
+TAB_SPACE_SPLIT = ' '
+TAB_SEMI_COLON_SPLIT = ';'
+TAB_SECTION_START = '['
+TAB_SECTION_END = ']'
+TAB_OPTION_START = '<'
+TAB_OPTION_END = '>'
+TAB_SLASH = '\\'
+TAB_BACK_SLASH = '/'
+TAB_STAR = '*'
+TAB_LINE_BREAK = '\n'
+TAB_PRINTCHAR_VT = '\x0b'
+TAB_PRINTCHAR_BS = '\b'
+TAB_PRINTCHAR_NUL = '\0'
+TAB_UINT8 = 'UINT8'
+TAB_UINT16 = 'UINT16'
+TAB_UINT32 = 'UINT32'
+TAB_UINT64 = 'UINT64'
+TAB_VOID = 'VOID*'
+TAB_GUID = 'GUID'
+
+TAB_PCD_CLEAN_NUMERIC_TYPES = {TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64}
+TAB_PCD_NUMERIC_TYPES = {TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, 'BOOLEAN'}
+TAB_PCD_NUMERIC_TYPES_VOID = {TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, 'BOOLEAN', TAB_VOID}
+
+TAB_WORKSPACE = '$(WORKSPACE)'
+TAB_FV_DIRECTORY = 'FV'
+
+TAB_ARCH_NULL = ''
+TAB_ARCH_COMMON = 'COMMON'
+TAB_ARCH_IA32 = 'IA32'
+TAB_ARCH_X64 = 'X64'
+TAB_ARCH_ARM = 'ARM'
+TAB_ARCH_EBC = 'EBC'
+TAB_ARCH_AARCH64 = 'AARCH64'
+TAB_ARCH_RISCV64 = 'RISCV64'
+
+ARCH_SET_FULL = {TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_ARM, TAB_ARCH_EBC, TAB_ARCH_AARCH64, TAB_ARCH_RISCV64, TAB_ARCH_COMMON}
+
+SUP_MODULE_BASE = 'BASE'
+SUP_MODULE_SEC = 'SEC'
+SUP_MODULE_PEI_CORE = 'PEI_CORE'
+SUP_MODULE_PEIM = 'PEIM'
+SUP_MODULE_DXE_CORE = 'DXE_CORE'
+SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
+SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
+SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
+SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
+SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
+SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
+SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
+SUP_MODULE_HOST_APPLICATION = 'HOST_APPLICATION'
+SUP_MODULE_SMM_CORE = 'SMM_CORE'
+SUP_MODULE_MM_STANDALONE = 'MM_STANDALONE'
+SUP_MODULE_MM_CORE_STANDALONE = 'MM_CORE_STANDALONE'
+
+SUP_MODULE_LIST = [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
+ SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
+ SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]
+SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(SUP_MODULE_LIST)
+SUP_MODULE_SET_PEI = {SUP_MODULE_PEIM, SUP_MODULE_PEI_CORE}
+
+EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
+EDK_COMPONENT_TYPE_SECURITY_CORE = 'SECURITY_CORE'
+EDK_COMPONENT_TYPE_PEI_CORE = SUP_MODULE_PEI_CORE
+EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
+EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
+EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
+EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
+EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
+EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
+EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'
+EDK_NAME = 'EDK'
+EDKII_NAME = 'EDKII'
+MSG_EDKII_MAIL_ADDR = 'devel@edk2.groups.io'
+
+COMPONENT_TO_MODULE_MAP_DICT = {
+ EDK_COMPONENT_TYPE_LIBRARY : SUP_MODULE_BASE,
+ EDK_COMPONENT_TYPE_SECURITY_CORE : SUP_MODULE_SEC,
+ EDK_COMPONENT_TYPE_PEI_CORE : SUP_MODULE_PEI_CORE,
+ EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER : SUP_MODULE_PEIM,
+ EDK_COMPONENT_TYPE_PIC_PEIM : SUP_MODULE_PEIM,
+ EDK_COMPONENT_TYPE_RELOCATABLE_PEIM : SUP_MODULE_PEIM,
+ "PE32_PEIM" : SUP_MODULE_PEIM,
+ EDK_COMPONENT_TYPE_BS_DRIVER : SUP_MODULE_DXE_DRIVER,
+ EDK_COMPONENT_TYPE_RT_DRIVER : SUP_MODULE_DXE_RUNTIME_DRIVER,
+ EDK_COMPONENT_TYPE_SAL_RT_DRIVER : SUP_MODULE_DXE_SAL_DRIVER,
+ EDK_COMPONENT_TYPE_APPLICATION : SUP_MODULE_UEFI_APPLICATION,
+ "LOGO" : SUP_MODULE_BASE,
+}
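+
+# A hedged lookup sketch (our illustrative helper, not part of the original
+# module): map an EDK component type to its EDK II module type; falling back
+# to USER_DEFINED for unmapped component types is our assumption.
+def _ExampleComponentToModuleType(ComponentType):
+    return COMPONENT_TO_MODULE_MAP_DICT.get(ComponentType, SUP_MODULE_USER_DEFINED)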
+
+BINARY_FILE_TYPE_FW = 'FW'
+BINARY_FILE_TYPE_GUID = 'GUID'
+BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
+BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
+BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
+BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
+BINARY_FILE_TYPE_LIB = 'LIB'
+BINARY_FILE_TYPE_PE32 = 'PE32'
+BINARY_FILE_TYPE_PIC = 'PIC'
+BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
+BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
+BINARY_FILE_TYPE_SMM_DEPEX = 'SMM_DEPEX'
+BINARY_FILE_TYPE_TE = 'TE'
+BINARY_FILE_TYPE_VER = 'VER'
+BINARY_FILE_TYPE_UI = 'UI'
+BINARY_FILE_TYPE_BIN = 'BIN'
+BINARY_FILE_TYPE_FV = 'FV'
+BINARY_FILE_TYPE_RAW = 'RAW_BINARY'
+
+PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
+PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'
+
+TAB_SOURCES = 'Sources'
+TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
+TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC
+TAB_SOURCES_AARCH64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_BINARIES = 'Binaries'
+TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
+TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC
+TAB_BINARIES_AARCH64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_INCLUDES = 'Includes'
+TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
+TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC
+TAB_INCLUDES_AARCH64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_GUIDS = 'Guids'
+TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
+TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC
+TAB_GUIDS_AARCH64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PROTOCOLS = 'Protocols'
+TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
+TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PROTOCOLS_AARCH64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PPIS = 'Ppis'
+TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
+TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PPIS_AARCH64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_LIBRARY_CLASSES = 'LibraryClasses'
+TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
+TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC
+TAB_LIBRARY_CLASSES_AARCH64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PACKAGES = 'Packages'
+TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
+TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PACKAGES_AARCH64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PCDS = 'Pcds'
+TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
+TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
+TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
+TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
+TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
+TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
+TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
+TAB_PCDS_DYNAMIC = 'Dynamic'
+TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
+TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
+TAB_PCDS_DYNAMIC_HII = 'DynamicHii'
+
+PCD_DYNAMIC_TYPE_SET = {TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII}
+PCD_DYNAMIC_EX_TYPE_SET = {TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII}
+
+# leave as a list for order
+PCD_TYPE_LIST = [TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_PATCHABLE_IN_MODULE, TAB_PCDS_FEATURE_FLAG, TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_EX]
+
+TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
+TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PCDS_FIXED_AT_BUILD_AARCH64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
+TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PCDS_PATCHABLE_IN_MODULE_AARCH64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
+TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PCDS_FEATURE_FLAG_AARCH64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
+TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
+TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
+TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
+TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PCDS_DYNAMIC_EX_AARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
+TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
+TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
+TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
+TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC
+TAB_PCDS_DYNAMIC_AARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE = 'PcdLoadFixAddressPeiCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE = 'PcdLoadFixAddressBootTimeCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE = 'PcdLoadFixAddressRuntimeCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE = 'PcdLoadFixAddressSmmCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SET = {TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE, \
+ TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE, \
+ TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE, \
+ TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE}
+
+## The mapping dictionary from datum type to its maximum value.
+MAX_VAL_TYPE = {"BOOLEAN":0x01, TAB_UINT8:0xFF, TAB_UINT16:0xFFFF, TAB_UINT32:0xFFFFFFFF, TAB_UINT64:0xFFFFFFFFFFFFFFFF}
+## The mapping dictionary from datum type to its size in bytes.
+MAX_SIZE_TYPE = {"BOOLEAN":1, TAB_UINT8:1, TAB_UINT16:2, TAB_UINT32:4, TAB_UINT64:8}
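+
+# A hedged validation sketch (our illustrative helper): check that an integer
+# value fits the range of its declared datum type, e.g. 0..0xFFFF for UINT16.
+def _ExampleValueFitsType(Value, DatumType):
+    return DatumType in MAX_VAL_TYPE and 0 <= Value <= MAX_VAL_TYPE[DatumType]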
+
+TAB_DEPEX = 'Depex'
+TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
+TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
+TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
+TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC
+TAB_DEPEX_AARCH64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_SKUIDS = 'SkuIds'
+TAB_DEFAULT_STORES = 'DefaultStores'
+TAB_DEFAULT_STORES_DEFAULT = 'STANDARD'
+
+TAB_LIBRARIES = 'Libraries'
+TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
+TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC
+TAB_LIBRARIES_AARCH64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_COMPONENTS = 'Components'
+TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
+TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC
+TAB_COMPONENTS_AARCH64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_AARCH64
+
+TAB_BUILD_OPTIONS = 'BuildOptions'
+
+TAB_DEFINE = 'DEFINE'
+TAB_NMAKE = 'Nmake'
+TAB_USER_EXTENSIONS = 'UserExtensions'
+TAB_INCLUDE = '!include'
+TAB_DEFAULT = 'DEFAULT'
+TAB_COMMON = 'COMMON'
+
+#
+# Common Define
+#
+TAB_COMMON_DEFINES = 'Defines'
+
+#
+# Inf Definitions
+#
+TAB_INF_DEFINES = TAB_COMMON_DEFINES
+TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
+TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
+TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
+TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
+TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
+TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
+TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
+TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
+TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
+TAB_INF_DEFINES_DPX_SOURCE = 'DPX_SOURCE'
+TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
+TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
+TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
+TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
+TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
+TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
+TAB_INF_DEFINES_VERSION = 'VERSION' # for Edk inf, the same as VERSION_NUMBER
+TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
+TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
+TAB_INF_DEFINES_TIANO_EDK_FLASHMAP_H = 'TIANO_EDK_FLASHMAP_H'
+TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
+TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
+TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
+TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
+TAB_INF_DEFINES_DEFINE = 'DEFINE'
+TAB_INF_DEFINES_SPEC = 'SPEC'
+TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
+TAB_INF_DEFINES_MACRO = '__MACROS__'
+TAB_INF_DEFINES_SHADOW = 'SHADOW'
+TAB_INF_FIXED_PCD = 'FixedPcd'
+TAB_INF_FEATURE_PCD = 'FeaturePcd'
+TAB_INF_PATCH_PCD = 'PatchPcd'
+TAB_INF_PCD = 'Pcd'
+TAB_INF_PCD_EX = 'PcdEx'
+TAB_INF_USAGE_PRO = 'PRODUCES'
+TAB_INF_USAGE_SOME_PRO = 'SOMETIMES_PRODUCES'
+TAB_INF_USAGE_CON = 'CONSUMES'
+TAB_INF_USAGE_SOME_CON = 'SOMETIMES_CONSUMES'
+TAB_INF_USAGE_NOTIFY = 'NOTIFY'
+TAB_INF_USAGE_TO_START = 'TO_START'
+TAB_INF_USAGE_BY_START = 'BY_START'
+TAB_INF_GUIDTYPE_EVENT = 'Event'
+TAB_INF_GUIDTYPE_FILE = 'File'
+TAB_INF_GUIDTYPE_FV = 'FV'
+TAB_INF_GUIDTYPE_GUID = 'GUID'
+TAB_INF_GUIDTYPE_HII = 'HII'
+TAB_INF_GUIDTYPE_HOB = 'HOB'
+TAB_INF_GUIDTYPE_ST = 'SystemTable'
+TAB_INF_GUIDTYPE_TSG = 'TokenSpaceGuid'
+TAB_INF_GUIDTYPE_VAR = 'Variable'
+TAB_INF_GUIDTYPE_PROTOCOL = 'PROTOCOL'
+TAB_INF_GUIDTYPE_PPI = 'PPI'
+TAB_INF_USAGE_UNDEFINED = 'UNDEFINED'
+
+#
+# Dec Definitions
+#
+TAB_DEC_DEFINES = TAB_COMMON_DEFINES
+TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
+TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
+TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
+TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
+TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
+
+#
+# Dsc Definitions
+#
+TAB_DSC_DEFINES = TAB_COMMON_DEFINES
+TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
+TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
+TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
+TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
+TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
+TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
+TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
+TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
+TAB_DSC_DEFINES_PCD_INFO_GENERATION = 'PCD_INFO_GENERATION'
+TAB_DSC_DEFINES_PCD_VAR_CHECK_GENERATION = 'PCD_VAR_CHECK_GENERATION'
+TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
+TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
+TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
+TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
+TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
+TAB_DSC_DEFINES_RFC_LANGUAGES = 'RFC_LANGUAGES'
+TAB_DSC_DEFINES_ISO_LANGUAGES = 'ISO_LANGUAGES'
+TAB_DSC_DEFINES_DEFINE = 'DEFINE'
+TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
+TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
+TAB_DSC_DEFINES_EDKGLOBAL = 'EDK_GLOBAL'
+TAB_DSC_PREBUILD = 'PREBUILD'
+TAB_DSC_POSTBUILD = 'POSTBUILD'
+#
+# TargetTxt Definitions
+#
+TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
+TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
+TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
+TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
+TAB_TAT_DEFINES_TARGET = 'TARGET'
+TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
+TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
+TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"
+
+#
+# ToolDef Definitions
+#
+TAB_TOD_DEFINES_TARGET = 'TARGET'
+TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
+TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
+TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
+TAB_TOD_DEFINES_FAMILY = 'FAMILY'
+TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'
+TAB_TOD_DEFINES_BUILDRULEORDER = 'BUILDRULEORDER'
+
+#
+# Conditional Statements
+#
+TAB_IF = '!if'
+TAB_END_IF = '!endif'
+TAB_ELSE_IF = '!elseif'
+TAB_ELSE = '!else'
+TAB_IF_DEF = '!ifdef'
+TAB_IF_N_DEF = '!ifndef'
+TAB_IF_EXIST = '!if exist'
+TAB_ERROR = '!error'
+
+#
+# Unknown section
+#
+TAB_UNKNOWN = 'UNKNOWN'
+
+#
+# Build database path
+#
+DATABASE_PATH = ":memory:"    # in-memory by default; on-disk alternative: "BuildDatabase.db"
+
+# used by ECC
+MODIFIER_SET = {'IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', 'EFI_BOOTSERVICE', 'EFIAPI'}
+
+# Dependency Opcodes
+DEPEX_OPCODE_BEFORE = "BEFORE"
+DEPEX_OPCODE_AFTER = "AFTER"
+DEPEX_OPCODE_PUSH = "PUSH"
+DEPEX_OPCODE_AND = "AND"
+DEPEX_OPCODE_OR = "OR"
+DEPEX_OPCODE_NOT = "NOT"
+DEPEX_OPCODE_END = "END"
+DEPEX_OPCODE_SOR = "SOR"
+DEPEX_OPCODE_TRUE = "TRUE"
+DEPEX_OPCODE_FALSE = "FALSE"
+
+# Dependency Expression
+DEPEX_SUPPORTED_OPCODE_SET = {"BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR", "TRUE", "FALSE", '(', ')'}
+
+TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
+TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
+TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
+TAB_C_CODE_FILE = "C-CODE-FILE"
+TAB_C_HEADER_FILE = "C-HEADER-FILE"
+TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
+TAB_IMAGE_FILE = "IMAGE-DEFINITION-FILE"
+TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
+TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
+TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
+TAB_OBJECT_FILE = "OBJECT-FILE"
+TAB_VFR_FILE = 'VISUAL-FORM-REPRESENTATION-FILE'
+
+# used by BRG
+TAB_BRG_PCD = 'PCD'
+TAB_BRG_LIBRARY = 'Library'
+
+#
+# Build Rule File Version Definition
+#
+TAB_BUILD_RULE_VERSION = "build_rule_version"
+
+# section name for PCDs
+PCDS_DYNAMIC_DEFAULT = "PcdsDynamicDefault"
+PCDS_DYNAMIC_VPD = "PcdsDynamicVpd"
+PCDS_DYNAMIC_HII = "PcdsDynamicHii"
+PCDS_DYNAMICEX_DEFAULT = "PcdsDynamicExDefault"
+PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
+PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
+
+SECTIONS_HAVE_ITEM_PCD_SET = {PCDS_DYNAMIC_DEFAULT.upper(), PCDS_DYNAMIC_VPD.upper(), PCDS_DYNAMIC_HII.upper(), \
+ PCDS_DYNAMICEX_DEFAULT.upper(), PCDS_DYNAMICEX_VPD.upper(), PCDS_DYNAMICEX_HII.upper()}
+# Section allowed to have items after arch
+SECTIONS_HAVE_ITEM_AFTER_ARCH_SET = {TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(),
+ PCDS_DYNAMIC_DEFAULT.upper(),
+ PCDS_DYNAMIC_VPD.upper(),
+ PCDS_DYNAMIC_HII.upper(),
+ PCDS_DYNAMICEX_DEFAULT.upper(),
+ PCDS_DYNAMICEX_VPD.upper(),
+ PCDS_DYNAMICEX_HII.upper(),
+ TAB_BUILD_OPTIONS.upper(),
+ TAB_PACKAGES.upper(),
+ TAB_INCLUDES.upper()}
+
+#
+# pack codes as used in PcdDb and elsewhere
+#
+PACK_PATTERN_GUID = '=LHHBBBBBBBB'
+PACK_CODE_BY_SIZE = {8:'=Q',
+ 4:'=L',
+ 2:'=H',
+ 1:'=B',
+ 0:'=B',
+ 16:""}
+
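+# A hedged usage sketch (our illustrative helper, not part of the original
+# module): integer values pack into little-endian bytes with the code selected
+# by size, while 16-byte GUID values are handled separately through
+# PACK_PATTERN_GUID and its eleven components.
+def _ExamplePackBySize(Value, Size):
+    import struct
+    return struct.pack(PACK_CODE_BY_SIZE[Size], Value)
+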
+TAB_COMPILER_MSFT = 'MSFT'
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/FmpPayloadHeader.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/FmpPayloadHeader.py
new file mode 100755
index 00000000..aa45c3a9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/FmpPayloadHeader.py
@@ -0,0 +1,85 @@
+## @file
+# Module that encodes and decodes a FMP_PAYLOAD_HEADER with a payload.
+# The FMP_PAYLOAD_HEADER is processed by the FmpPayloadHeaderLib in the
+# FmpDevicePkg.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+FmpPayloadHeader
+'''
+
+import struct
+
+def _SIGNATURE_32 (A, B, C, D):
+ return struct.unpack ('=I',bytearray (A + B + C + D, 'ascii'))[0]
+
+def _SIGNATURE_32_TO_STRING (Signature):
+ return struct.pack ("<I", Signature).decode ()
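+
+def _ExampleSignatureRoundTrip():
+    # Illustrative check (ours, not part of the original module): the two
+    # helpers invert each other for the FMP payload signature used below.
+    Signature = _SIGNATURE_32 ('M', 'S', 'S', '1')
+    assert Signature == 0x3153534D
+    assert _SIGNATURE_32_TO_STRING (Signature) == 'MSS1'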
+
+class FmpPayloadHeaderClass (object):
+ #
+ # typedef struct {
+ # UINT32 Signature;
+ # UINT32 HeaderSize;
+ # UINT32 FwVersion;
+ # UINT32 LowestSupportedVersion;
+ # } FMP_PAYLOAD_HEADER;
+ #
+ # #define FMP_PAYLOAD_HEADER_SIGNATURE SIGNATURE_32 ('M', 'S', 'S', '1')
+ #
+ _StructFormat = '<IIII'
+ _StructSize = struct.calcsize (_StructFormat)
+
+ _FMP_PAYLOAD_HEADER_SIGNATURE = _SIGNATURE_32 ('M', 'S', 'S', '1')
+
+ def __init__ (self):
+ self._Valid = False
+ self.Signature = self._FMP_PAYLOAD_HEADER_SIGNATURE
+ self.HeaderSize = self._StructSize
+ self.FwVersion = 0x00000000
+ self.LowestSupportedVersion = 0x00000000
+ self.Payload = b''
+
+ def Encode (self):
+ FmpPayloadHeader = struct.pack (
+ self._StructFormat,
+ self.Signature,
+ self.HeaderSize,
+ self.FwVersion,
+ self.LowestSupportedVersion
+ )
+ self._Valid = True
+ return FmpPayloadHeader + self.Payload
+
+ def Decode (self, Buffer):
+        if len (Buffer) < self._StructSize:
+            raise ValueError ('Buffer is smaller than FMP_PAYLOAD_HEADER')
+ (Signature, HeaderSize, FwVersion, LowestSupportedVersion) = \
+ struct.unpack (
+ self._StructFormat,
+ Buffer[0:self._StructSize]
+ )
+        if Signature != self._FMP_PAYLOAD_HEADER_SIGNATURE:
+            raise ValueError ('Signature does not match FMP_PAYLOAD_HEADER_SIGNATURE')
+        if HeaderSize < self._StructSize:
+            raise ValueError ('HeaderSize is smaller than the fixed FMP_PAYLOAD_HEADER')
+ self.Signature = Signature
+ self.HeaderSize = HeaderSize
+ self.FwVersion = FwVersion
+ self.LowestSupportedVersion = LowestSupportedVersion
+ self.Payload = Buffer[self.HeaderSize:]
+
+ self._Valid = True
+ return self.Payload
+
+ def DumpInfo (self):
+        if not self._Valid:
+            raise ValueError ('FMP_PAYLOAD_HEADER is not valid; call Encode() or Decode() first')
+ print ('FMP_PAYLOAD_HEADER.Signature = {Signature:08X} ({SignatureString})'.format (Signature = self.Signature, SignatureString = _SIGNATURE_32_TO_STRING (self.Signature)))
+ print ('FMP_PAYLOAD_HEADER.HeaderSize = {HeaderSize:08X}'.format (HeaderSize = self.HeaderSize))
+ print ('FMP_PAYLOAD_HEADER.FwVersion = {FwVersion:08X}'.format (FwVersion = self.FwVersion))
+ print ('FMP_PAYLOAD_HEADER.LowestSupportedVersion = {LowestSupportedVersion:08X}'.format (LowestSupportedVersion = self.LowestSupportedVersion))
+ print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload)))
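+
+## A minimal round-trip sketch (our illustrative example, not part of the
+# original module): prepend a header to a payload, then decode it back.
+def _ExampleRoundTrip():
+    Header = FmpPayloadHeaderClass ()
+    Header.FwVersion = 0x00000002
+    Header.LowestSupportedVersion = 0x00000001
+    Header.Payload = b'firmware image bytes'
+    Image = Header.Encode ()
+    Decoder = FmpPayloadHeaderClass ()
+    assert Decoder.Decode (Image) == b'firmware image bytes'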
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/__init__.py
new file mode 100644
index 00000000..119f1f2a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/Capsule/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Common.Edk2.Capsule' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/__init__.py
new file mode 100644
index 00000000..0d26bda5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Edk2/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Common.Edk2' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/EdkLogger.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/EdkLogger.py
new file mode 100755
index 00000000..ee088687
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/EdkLogger.py
@@ -0,0 +1,421 @@
+## @file
+# This file implements the log mechanism for Python tools.
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+# Copyright 2001-2016 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
+# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
+# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+# The copyright notice above applies to the QueueHandler fallback below.
+
+## Import modules
+from __future__ import absolute_import
+import Common.LongFilePathOs as os, sys, logging
+import traceback
+from .BuildToolError import *
+try:
+ from logging.handlers import QueueHandler
+except ImportError:
+ class QueueHandler(logging.Handler):
+ """
+ This handler sends events to a queue. Typically, it would be used together
+ with a multiprocessing Queue to centralise logging to file in one process
+ (in a multi-process application), so as to avoid file write contention
+ between processes.
+
+        This code is new in Python 3.2, but this class can be copy-pasted into
+        user code for use with earlier Python versions.
+ """
+
+ def __init__(self, queue):
+ """
+ Initialise an instance, using the passed queue.
+ """
+ logging.Handler.__init__(self)
+ self.queue = queue
+
+ def enqueue(self, record):
+ """
+ Enqueue a record.
+
+ The base implementation uses put_nowait. You may want to override
+ this method if you want to use blocking, timeouts or custom queue
+ implementations.
+ """
+ self.queue.put_nowait(record)
+
+ def prepare(self, record):
+ """
+ Prepares a record for queuing. The object returned by this method is
+ enqueued.
+
+ The base implementation formats the record to merge the message
+ and arguments, and removes unpickleable items from the record
+ in-place.
+
+ You might want to override this method if you want to convert
+ the record to a dict or JSON string, or send a modified copy
+ of the record while leaving the original intact.
+ """
+ # The format operation gets traceback text into record.exc_text
+ # (if there's exception data), and also returns the formatted
+ # message. We can then use this to replace the original
+ # msg + args, as these might be unpickleable. We also zap the
+ # exc_info and exc_text attributes, as they are no longer
+ # needed and, if not None, will typically not be pickleable.
+ msg = self.format(record)
+ record.message = msg
+ record.msg = msg
+ record.args = None
+ record.exc_info = None
+ record.exc_text = None
+ return record
+
+ def emit(self, record):
+ """
+ Emit a record.
+
+ Writes the LogRecord to the queue, preparing it for pickling first.
+ """
+ try:
+ self.enqueue(self.prepare(record))
+ except Exception:
+ self.handleError(record)
+
+class BlockQueueHandler(QueueHandler):
+    def enqueue(self, record):
+        self.queue.put(record, True)
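+
+# A hedged wiring sketch (our illustrative example, not part of the original
+# module), using only the standard multiprocessing module: worker processes
+# log through BlockQueueHandler while a listener process drains the queue.
+def _ExampleQueueLogging():
+    import multiprocessing
+    LogQueue = multiprocessing.Queue()
+    Worker = logging.getLogger("example_worker")
+    Worker.addHandler(BlockQueueHandler(LogQueue))
+    Worker.warning("this record travels through the queue")
+    Record = LogQueue.get()    # a real listener would loop on this
+    logging.getLogger("example_listener").handle(Record)
+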
+## Log level constants
+DEBUG_0 = 1
+DEBUG_1 = 2
+DEBUG_2 = 3
+DEBUG_3 = 4
+DEBUG_4 = 5
+DEBUG_5 = 6
+DEBUG_6 = 7
+DEBUG_7 = 8
+DEBUG_8 = 9
+DEBUG_9 = 10
+VERBOSE = 15
+INFO = 20
+WARN = 30
+QUIET = 40
+ERROR = 50
+SILENT = 99
+
+IsRaiseError = True
+
+# Tool name
+_ToolName = os.path.basename(sys.argv[0])
+
+# For validation purpose
+_LogLevels = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5,
+ DEBUG_6, DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO,
+ ERROR, QUIET, SILENT]
+
+# For DEBUG level (All DEBUG_0~9 are applicable)
+_DebugLogger = logging.getLogger("tool_debug")
+_DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
+
+# For VERBOSE, INFO, WARN level
+_InfoLogger = logging.getLogger("tool_info")
+_InfoFormatter = logging.Formatter("%(message)s")
+
+# For ERROR level
+_ErrorLogger = logging.getLogger("tool_error")
+_ErrorFormatter = logging.Formatter("%(message)s")
+
+# String templates for ERROR/WARN/DEBUG log message
+_ErrorMessageTemplate = '\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s'
+_ErrorMessageTemplateWithoutFile = '\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s'
+_WarningMessageTemplate = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s'
+_WarningMessageTemplateWithoutFile = '%(tool)s: : warning: %(msg)s'
+_DebugMessageTemplate = '%(file)s(%(line)s): debug: \n %(msg)s'
+
+#
+# Flag used to treat WARN as ERROR.
+# By default, only an ERROR message will break the tool's execution.
+#
+_WarningAsError = False
+
+## Log debug message
+#
+# @param   Level       DEBUG level (DEBUG_0~9)
+# @param Message Debug information
+# @param ExtraData More information associated with "Message"
+#
+def debug(Level, Message, ExtraData=None):
+ if _DebugLogger.level > Level:
+ return
+ if Level > DEBUG_9:
+ return
+
+ # Find out the caller method information
+ CallerStack = traceback.extract_stack()[-2]
+ TemplateDict = {
+ "file" : CallerStack[0],
+ "line" : CallerStack[1],
+ "msg" : Message,
+ }
+
+ if ExtraData is not None:
+ LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData
+ else:
+ LogText = _DebugMessageTemplate % TemplateDict
+
+ _DebugLogger.log(Level, LogText)
+
+## Log verbose message
+#
+# @param Message Verbose information
+#
+def verbose(Message):
+ return _InfoLogger.log(VERBOSE, Message)
+
+## Log warning message
+#
+# Warning messages flag conditions that might be wrong but will not fail the tool.
+#
+# @param ToolName The name of the tool. If not given, the name of caller
+# method will be used.
+# @param Message Warning information
+# @param File The name of file which caused the warning.
+# @param Line The line number in the "File" which caused the warning.
+# @param ExtraData More information associated with "Message"
+#
+def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
+ if _InfoLogger.level > WARN:
+ return
+
+ # if no tool name given, use caller's source file name as tool name
+ if ToolName is None or ToolName == "":
+ ToolName = os.path.basename(traceback.extract_stack()[-2][0])
+
+ if Line is None:
+ Line = "..."
+ else:
+ Line = "%d" % Line
+
+ TemplateDict = {
+ "tool" : ToolName,
+ "file" : File,
+ "line" : Line,
+ "msg" : Message,
+ }
+
+ if File is not None:
+ LogText = _WarningMessageTemplate % TemplateDict
+ else:
+ LogText = _WarningMessageTemplateWithoutFile % TemplateDict
+
+ if ExtraData is not None:
+ LogText += "\n %s" % ExtraData
+
+ _InfoLogger.log(WARN, LogText)
+
+    # Raise an exception if warnings are being treated as errors
+    if _WarningAsError:
+ raise FatalError(WARNING_AS_ERROR)
+
+## Log INFO message
+info = _InfoLogger.info
+
+## Log ERROR message
+#
+# Once an error messages is logged, the tool's execution will be broken by raising
+# an exception. If you don't want to break the execution later, you can give
+# "RaiseError" with "False" value.
+#
+# @param ToolName The name of the tool. If not given, the name of caller
+# method will be used.
+# @param ErrorCode The error code
+# @param Message Warning information
+# @param File The name of file which caused the error.
+# @param Line The line number in the "File" which caused the warning.
+# @param ExtraData More information associated with "Message"
+# @param RaiseError Raise an exception to break the tool's execution if
+# it's True. This is the default behavior.
+#
+def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
+ if Line is None:
+ Line = "..."
+ else:
+ Line = "%d" % Line
+
+ if Message is None:
+ if ErrorCode in gErrorMessage:
+ Message = gErrorMessage[ErrorCode]
+ else:
+ Message = gErrorMessage[UNKNOWN_ERROR]
+
+ if ExtraData is None:
+ ExtraData = ""
+
+ TemplateDict = {
+ "tool" : _ToolName,
+ "file" : File,
+ "line" : Line,
+ "errorcode" : ErrorCode,
+ "msg" : Message,
+ "extra" : ExtraData
+ }
+
+ if File is not None:
+ LogText = _ErrorMessageTemplate % TemplateDict
+ else:
+ LogText = _ErrorMessageTemplateWithoutFile % TemplateDict
+
+ # VBox - begin
+ LogText += '\n' + getSimpleStack(3);
+    # VBox - end
+
+ _ErrorLogger.log(ERROR, LogText)
+
+ if RaiseError and IsRaiseError:
+ raise FatalError(ErrorCode)
+
+# Log information which should be always put out
+quiet = _ErrorLogger.error
+
+# VBox - begin
+
+## Get caller info
+# @return String with caller name, file and line number, or None if unavailable.
+# @param oFrame The frame to start from instead of the current caller (optional).
+# @param iFrame The frame number of the caller to get.
+def getCallerName(oFrame = None, iFrame = 2):
+ if oFrame is None:
+ try:
+ raise Exception();
+ except:
+ oFrame = sys.exc_info()[2].tb_frame.f_back;
+ while iFrame > 1:
+ if oFrame is not None:
+ oFrame = oFrame.f_back;
+ iFrame = iFrame - 1;
+ if oFrame is not None:
+ return '%s %s:%u' % (oFrame.f_code.co_name, oFrame.f_code.co_filename, oFrame.f_lineno);
+ return None;
+
+## Get a simple stack trace.
+# @return simple stack trace (string).
+# @param iFrame The frame to start with.
+# @param cMaxFrames The maximum number of frames to dump.
+def getSimpleStack(iFrame = 2, cMaxFrames = 7):
+ sStack = 'Stack:\n'
+ for i in range(cMaxFrames):
+ sCaller = getCallerName(iFrame = iFrame + i);
+ if sCaller is None:
+ break;
+ sStack += '[%u] %s\n' % (i + 1, sCaller);
+ return sStack;
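+
+## Illustrative only (our example, not called by the tools): dump a compact
+# stack of up to three caller frames from the current call site.
+def _ExampleLogStack():
+    print(getSimpleStack(iFrame = 2, cMaxFrames = 3));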
+
+# VBox - end
+
+## Initialize log system for a worker process, sending all records to log_q
+def LogClientInitialize(log_q):
+ #
+ # Since we use different format to log different levels of message into different
+ # place (stdout or stderr), we have to use different "Logger" objects to do this.
+ #
+ # For DEBUG level (All DEBUG_0~9 are applicable)
+ _DebugLogger.setLevel(INFO)
+ _DebugChannel = BlockQueueHandler(log_q)
+ _DebugChannel.setFormatter(_DebugFormatter)
+ _DebugLogger.addHandler(_DebugChannel)
+
+ # For VERBOSE, INFO, WARN level
+ _InfoLogger.setLevel(INFO)
+ _InfoChannel = BlockQueueHandler(log_q)
+ _InfoChannel.setFormatter(_InfoFormatter)
+ _InfoLogger.addHandler(_InfoChannel)
+
+ # For ERROR level
+ _ErrorLogger.setLevel(INFO)
+ _ErrorCh = BlockQueueHandler(log_q)
+ _ErrorCh.setFormatter(_ErrorFormatter)
+ _ErrorLogger.addHandler(_ErrorCh)
+
+## Set log level
+#
+# @param    Level   One of the log levels in _LogLevels
+def SetLevel(Level):
+ if Level not in _LogLevels:
+ info("Not supported log level (%d). Use default level instead." % Level)
+ Level = INFO
+ _DebugLogger.setLevel(Level)
+ _InfoLogger.setLevel(Level)
+ _ErrorLogger.setLevel(Level)
+
+## Initialize log system
+def Initialize():
+ #
+ # Since we use different format to log different levels of message into different
+ # place (stdout or stderr), we have to use different "Logger" objects to do this.
+ #
+ # For DEBUG level (All DEBUG_0~9 are applicable)
+ _DebugLogger.setLevel(INFO)
+ _DebugChannel = logging.StreamHandler(sys.stdout)
+ _DebugChannel.setFormatter(_DebugFormatter)
+ _DebugLogger.addHandler(_DebugChannel)
+
+ # For VERBOSE, INFO, WARN level
+ _InfoLogger.setLevel(INFO)
+ _InfoChannel = logging.StreamHandler(sys.stdout)
+ _InfoChannel.setFormatter(_InfoFormatter)
+ _InfoLogger.addHandler(_InfoChannel)
+
+ # For ERROR level
+ _ErrorLogger.setLevel(INFO)
+ _ErrorCh = logging.StreamHandler(sys.stderr)
+ _ErrorCh.setFormatter(_ErrorFormatter)
+ _ErrorLogger.addHandler(_ErrorCh)
+
+def InitializeForUnitTest():
+ Initialize()
+ SetLevel(SILENT)
+
+## Get current log level
+def GetLevel():
+ return _InfoLogger.getEffectiveLevel()
+
+## Raise up warning as error
+def SetWarningAsError():
+ global _WarningAsError
+ _WarningAsError = True
+
+## Specify a file to store the log message as well as put on console
+#
+# @param LogFile The file path used to store the log message
+#
+def SetLogFile(LogFile):
+ if os.path.exists(LogFile):
+ os.remove(LogFile)
+
+ _Ch = logging.FileHandler(LogFile)
+ _Ch.setFormatter(_DebugFormatter)
+ _DebugLogger.addHandler(_Ch)
+
+    _Ch = logging.FileHandler(LogFile)
+ _Ch.setFormatter(_InfoFormatter)
+ _InfoLogger.addHandler(_Ch)
+
+ _Ch = logging.FileHandler(LogFile)
+ _Ch.setFormatter(_ErrorFormatter)
+ _ErrorLogger.addHandler(_Ch)
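+
+# Typical wiring (an illustrative sketch, ours; the "build.log" name is a
+# hypothetical example): console logging first, then mirror into a file.
+def _ExampleSetupLogging():
+    Initialize()
+    SetLevel(VERBOSE)
+    SetLogFile("build.log")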
+
+if __name__ == '__main__':
+ pass
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Expression.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Expression.py
new file mode 100755
index 00000000..e6228862
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Expression.py
@@ -0,0 +1,1054 @@
+## @file
+# This file is used to parse and evaluate expression in directive or PCD value.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from Common.GlobalData import *
+from CommonDataClass.Exceptions import BadExpression
+from CommonDataClass.Exceptions import WrnExpression
+from .Misc import GuidStringToGuidStructureString, ParseFieldValue,CopyDict
+import Common.EdkLogger as EdkLogger
+import copy
+from Common.DataType import *
+import sys
+import re
+from random import sample
+import string
+
+ERR_STRING_EXPR = 'This operator cannot be used in string expression: [%s].'
+ERR_SNYTAX = 'Syntax error, the rest of the expression cannot be evaluated: [%s].'
+ERR_MATCH = 'No matching right parenthesis.'
+ERR_STRING_TOKEN = 'Bad string token: [%s].'
+ERR_MACRO_TOKEN = 'Bad macro token: [%s].'
+ERR_EMPTY_TOKEN = 'Empty token is not allowed.'
+ERR_PCD_RESOLVE = 'The PCD should be FeatureFlag type or FixedAtBuild type: [%s].'
+ERR_VALID_TOKEN = 'No more valid token found from rest of string: [%s].'
+ERR_EXPR_TYPE = 'Different types found in expression.'
+ERR_OPERATOR_UNSUPPORT = 'Unsupported operator: [%s]'
+ERR_REL_NOT_IN = 'Expect "IN" after "not" operator.'
+WRN_BOOL_EXPR = 'Operand of boolean type cannot be used in arithmetic expression.'
+WRN_EQCMP_STR_OTHERS = '== Comparison between Operand of string type and Boolean/Number Type always returns False.'
+WRN_NECMP_STR_OTHERS = '!= Comparison between Operand of string type and Boolean/Number Type always returns True.'
+ERR_RELCMP_STR_OTHERS = 'Operator taking Operand of string type and Boolean/Number Type is not allowed: [%s].'
+ERR_STRING_CMP = 'Unicode string and general string cannot be compared: [%s %s %s]'
+ERR_ARRAY_TOKEN = 'Bad C array or C format GUID token: [%s].'
+ERR_ARRAY_ELE = 'This must be HEX value for NList or Array: [%s].'
+ERR_EMPTY_EXPR = 'Empty expression is not allowed.'
+ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOOL_CHAIN_TAG) and $(TARGET).'
+
+__ValidString = re.compile(r'[_a-zA-Z][_0-9a-zA-Z]*$')
+_ReLabel = re.compile(r'LABEL\((\w+)\)')
+_ReOffset = re.compile(r'OFFSET_OF\((\w+)\)')
+PcdPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_]*\.[_a-zA-Z][0-9A-Za-z_]*$')
+
+## SplitString
+# Split a string into a list according to quoting
+# For example: abc"de\"f"ghi"jkl"mn will be: ['abc', '"de\"f"', 'ghi', '"jkl"', 'mn']
+#
+def SplitString(String):
+ # There might be escaped quote: "abc\"def\\\"ghi", 'abc\'def\\\'ghi'
+ RanStr = ''.join(sample(string.ascii_letters + string.digits, 8))
+ String = String.replace('\\\\', RanStr).strip()
+ RetList = []
+ InSingleQuote = False
+ InDoubleQuote = False
+ Item = ''
+ for i, ch in enumerate(String):
+ if ch == '"' and not InSingleQuote:
+ if String[i - 1] != '\\':
+ InDoubleQuote = not InDoubleQuote
+ if not InDoubleQuote:
+ Item += String[i]
+ RetList.append(Item)
+ Item = ''
+ continue
+ if Item:
+ RetList.append(Item)
+ Item = ''
+ elif ch == "'" and not InDoubleQuote:
+ if String[i - 1] != '\\':
+ InSingleQuote = not InSingleQuote
+ if not InSingleQuote:
+ Item += String[i]
+ RetList.append(Item)
+ Item = ''
+ continue
+ if Item:
+ RetList.append(Item)
+ Item = ''
+ Item += String[i]
+ if InSingleQuote or InDoubleQuote:
+ raise BadExpression(ERR_STRING_TOKEN % Item)
+ if Item:
+ RetList.append(Item)
+ for i, ch in enumerate(RetList):
+ if RanStr in ch:
+ RetList[i] = ch.replace(RanStr,'\\\\')
+ return RetList
+
+def SplitPcdValueString(String):
+ # There might be escaped comma in GUID() or DEVICE_PATH() or " "
+ # or ' ' or L' ' or L" "
+ RanStr = ''.join(sample(string.ascii_letters + string.digits, 8))
+ String = String.replace('\\\\', RanStr).strip()
+ RetList = []
+ InParenthesis = 0
+ InSingleQuote = False
+ InDoubleQuote = False
+ Item = ''
+ for i, ch in enumerate(String):
+ if ch == '(':
+ InParenthesis += 1
+ elif ch == ')':
+ if InParenthesis:
+ InParenthesis -= 1
+ else:
+ raise BadExpression(ERR_STRING_TOKEN % Item)
+ elif ch == '"' and not InSingleQuote:
+ if String[i-1] != '\\':
+ InDoubleQuote = not InDoubleQuote
+ elif ch == "'" and not InDoubleQuote:
+ if String[i-1] != '\\':
+ InSingleQuote = not InSingleQuote
+ elif ch == ',':
+ if InParenthesis or InSingleQuote or InDoubleQuote:
+ Item += String[i]
+ continue
+ elif Item:
+ RetList.append(Item)
+ Item = ''
+ continue
+ Item += String[i]
+ if InSingleQuote or InDoubleQuote or InParenthesis:
+ raise BadExpression(ERR_STRING_TOKEN % Item)
+ if Item:
+ RetList.append(Item)
+ for i, ch in enumerate(RetList):
+ if RanStr in ch:
+ RetList[i] = ch.replace(RanStr,'\\\\')
+ return RetList
+
+def IsValidCName(Str):
+ return True if __ValidString.match(Str) else False
+
+def BuildOptionValue(PcdValue, GuidDict):
+ if PcdValue.startswith('H'):
+ InputValue = PcdValue[1:]
+ elif PcdValue.startswith("L'") or PcdValue.startswith("'"):
+ InputValue = PcdValue
+ elif PcdValue.startswith('L'):
+ InputValue = 'L"' + PcdValue[1:] + '"'
+ else:
+ InputValue = PcdValue
+ try:
+ PcdValue = ValueExpressionEx(InputValue, TAB_VOID, GuidDict)(True)
+ except:
+ pass
+
+ return PcdValue
+
+## ReplaceExprMacro
+#
+# Replace $(MACRO) references in String using the Macros mapping. An undefined
+# macro is replaced with the integer constant 0, following the C rule quoted
+# in the implementation for !if/!elif constant expressions.
+#
+def ReplaceExprMacro(String, Macros, ExceptionList = None):
+ StrList = SplitString(String)
+ for i, String in enumerate(StrList):
+ InQuote = False
+ if String.startswith('"'):
+ InQuote = True
+ MacroStartPos = String.find('$(')
+ if MacroStartPos < 0:
+ for Pcd in gPlatformPcds:
+ if Pcd in String:
+ if Pcd not in gConditionalPcds:
+ gConditionalPcds.append(Pcd)
+ continue
+ RetStr = ''
+ while MacroStartPos >= 0:
+ RetStr = String[0:MacroStartPos]
+ MacroEndPos = String.find(')', MacroStartPos)
+ if MacroEndPos < 0:
+ raise BadExpression(ERR_MACRO_TOKEN % String[MacroStartPos:])
+ Macro = String[MacroStartPos+2:MacroEndPos]
+ if Macro not in Macros:
+ # From C reference manual:
+ # If an undefined macro name appears in the constant-expression of
+ # !if or !elif, it is replaced by the integer constant 0.
+ RetStr += '0'
+ elif not InQuote:
+ Tklst = RetStr.split()
+ if Tklst and Tklst[-1] in {'IN', 'in'} and ExceptionList and Macro not in ExceptionList:
+ raise BadExpression(ERR_IN_OPERAND)
+ # Make sure the macro in exception list is encapsulated by double quote
+ # For example: DEFINE ARCH = IA32 X64
+ # $(ARCH) is replaced with "IA32 X64"
+ if ExceptionList and Macro in ExceptionList:
+ RetStr += '"' + Macros[Macro] + '"'
+ elif Macros[Macro].strip():
+ RetStr += Macros[Macro]
+ else:
+ RetStr += '""'
+ else:
+ RetStr += Macros[Macro]
+ RetStr += String[MacroEndPos+1:]
+ String = RetStr
+ MacroStartPos = String.find('$(')
+ StrList[i] = RetStr
+ return ''.join(StrList)
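+
+def _ExampleReplaceExprMacro():
+    # Illustrative check (ours): an undefined macro collapses to '0'.
+    assert ReplaceExprMacro('$(UNDEFINED_MACRO) + 1', {}) == '0 + 1'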
+
+# Convert an integer to a string for use with the in/not in operators
+def IntToStr(Value):
+ StrList = []
+ while Value > 0:
+ StrList.append(chr(Value & 0xff))
+ Value = Value >> 8
+ Value = '"' + ''.join(StrList) + '"'
+ return Value
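+
+def _ExampleIntToStr():
+    # Illustrative check (ours): bytes are emitted low-order first.
+    assert IntToStr(0x434241) == '"ABC"'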
+
+SupportedInMacroList = ['TARGET', 'TOOL_CHAIN_TAG', 'ARCH', 'FAMILY']
+
+class BaseExpression(object):
+ def __init__(self, *args, **kwargs):
+ super(BaseExpression, self).__init__()
+
+ # Check if current token matches the operators given from parameter
+ def _IsOperator(self, OpSet):
+ Idx = self._Idx
+ self._GetOperator()
+ if self._Token in OpSet:
+ if self._Token in self.LogicalOperators:
+ self._Token = self.LogicalOperators[self._Token]
+ return True
+ self._Idx = Idx
+ return False
+
+class ValueExpression(BaseExpression):
+ # Logical operator mapping
+ LogicalOperators = {
+ '&&' : 'and', '||' : 'or',
+ '!' : 'not', 'AND': 'and',
+ 'OR' : 'or' , 'NOT': 'not',
+ 'XOR': '^' , 'xor': '^',
+ 'EQ' : '==' , 'NE' : '!=',
+ 'GT' : '>' , 'LT' : '<',
+ 'GE' : '>=' , 'LE' : '<=',
+ 'IN' : 'in'
+ }
+
+ NonLetterOpLst = ['+', '-', TAB_STAR, '/', '%', '&', '|', '^', '~', '<<', '>>', '!', '=', '>', '<', '?', ':']
+
+
+    SymbolPattern = re.compile(r"("
+                                 r"\$\([A-Z][A-Z0-9_]*\)|\$\(\w+\.\w+\)|\w+\.\w+|"
+                                 r"&&|\|\||!(?!=)|"
+                                 r"(?<=\W)AND(?=\W)|(?<=\W)OR(?=\W)|(?<=\W)NOT(?=\W)|(?<=\W)XOR(?=\W)|"
+                                 r"(?<=\W)EQ(?=\W)|(?<=\W)NE(?=\W)|(?<=\W)GT(?=\W)|(?<=\W)LT(?=\W)|(?<=\W)GE(?=\W)|(?<=\W)LE(?=\W)"
+                                 r")")
+
+ @staticmethod
+ def Eval(Operator, Oprand1, Oprand2 = None):
+ WrnExp = None
+
+ if Operator not in {"==", "!=", ">=", "<=", ">", "<", "in", "not in"} and \
+ (isinstance(Oprand1, type('')) or isinstance(Oprand2, type(''))):
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+ if Operator in {'in', 'not in'}:
+ if not isinstance(Oprand1, type('')):
+ Oprand1 = IntToStr(Oprand1)
+ if not isinstance(Oprand2, type('')):
+ Oprand2 = IntToStr(Oprand2)
+ TypeDict = {
+ type(0) : 0,
+ # For python2 long type
+ type(sys.maxsize + 1) : 0,
+ type('') : 1,
+ type(True) : 2
+ }
+
+ EvalStr = ''
+ if Operator in {"!", "NOT", "not"}:
+ if isinstance(Oprand1, type('')):
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+ EvalStr = 'not Oprand1'
+ elif Operator in {"~"}:
+ if isinstance(Oprand1, type('')):
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+ EvalStr = '~ Oprand1'
+ else:
+ if Operator in {"+", "-"} and (type(True) in {type(Oprand1), type(Oprand2)}):
+ # Boolean in '+'/'-' will be evaluated but raise warning
+ WrnExp = WrnExpression(WRN_BOOL_EXPR)
+ elif type('') in {type(Oprand1), type(Oprand2)} and not isinstance(Oprand1, type(Oprand2)):
+ # == between string and number/boolean will always return False, != return True
+ if Operator == "==":
+ WrnExp = WrnExpression(WRN_EQCMP_STR_OTHERS)
+ WrnExp.result = False
+ raise WrnExp
+ elif Operator == "!=":
+ WrnExp = WrnExpression(WRN_NECMP_STR_OTHERS)
+ WrnExp.result = True
+ raise WrnExp
+ else:
+ raise BadExpression(ERR_RELCMP_STR_OTHERS % Operator)
+ elif TypeDict[type(Oprand1)] != TypeDict[type(Oprand2)]:
+ if Operator in {"==", "!=", ">=", "<=", ">", "<"} and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):
+ # comparison between number and boolean is allowed
+ pass
+ elif Operator in {'&', '|', '^', "and", "or"} and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):
+ # bitwise and logical operation between number and boolean is allowed
+ pass
+ else:
+ raise BadExpression(ERR_EXPR_TYPE)
+ if isinstance(Oprand1, type('')) and isinstance(Oprand2, type('')):
+ if ((Oprand1.startswith('L"') or Oprand1.startswith("L'")) and (not Oprand2.startswith('L"')) and (not Oprand2.startswith("L'"))) or \
+ (((not Oprand1.startswith('L"')) and (not Oprand1.startswith("L'"))) and (Oprand2.startswith('L"') or Oprand2.startswith("L'"))):
+ raise BadExpression(ERR_STRING_CMP % (Oprand1, Operator, Oprand2))
+ if 'in' in Operator and isinstance(Oprand2, type('')):
+ Oprand2 = Oprand2.split()
+ EvalStr = 'Oprand1 ' + Operator + ' Oprand2'
+
+ # Local symbols used by built in eval function
+ Dict = {
+ 'Oprand1' : Oprand1,
+ 'Oprand2' : Oprand2
+ }
+ try:
+ Val = eval(EvalStr, {}, Dict)
+ except Exception as Excpt:
+ raise BadExpression(str(Excpt))
+
+ if Operator in {'and', 'or'}:
+ if Val:
+ Val = True
+ else:
+ Val = False
+
+ if WrnExp:
+ WrnExp.result = Val
+ raise WrnExp
+ return Val
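+
+    # Illustrative behavior (our notes, derived from the checks above):
+    #   ValueExpression.Eval('+', 1, 2)      -> 3
+    #   ValueExpression.Eval('==', 1, True)  -> True  (number/boolean compare is allowed)
+    #   ValueExpression.Eval('+', 'a', 'b')  raises BadExpression (no string arithmetic)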
+
+ def __init__(self, Expression, SymbolTable={}):
+        super(ValueExpression, self).__init__(Expression, SymbolTable)
+ self._NoProcess = False
+ if not isinstance(Expression, type('')):
+ self._Expr = Expression
+ self._NoProcess = True
+ return
+
+ self._Expr = ReplaceExprMacro(Expression.strip(),
+ SymbolTable,
+ SupportedInMacroList)
+
+ if not self._Expr.strip():
+ raise BadExpression(ERR_EMPTY_EXPR)
+
+ #
+ # The symbol table including PCD and macro mapping
+ #
+ self._Symb = CopyDict(SymbolTable)
+ self._Symb.update(self.LogicalOperators)
+ self._Idx = 0
+ self._Len = len(self._Expr)
+ self._Token = ''
+ self._WarnExcept = None
+
+ # Literal token without any conversion
+ self._LiteralToken = ''
+
+ # Public entry for this class
+ # @param RealValue: False: only evaluate if the expression is true or false, used for conditional expression
+ # True : return the evaluated str(value), used for PCD value
+ #
+ # @return: True or False if RealValue is False
+ # Evaluated value of string format if RealValue is True
+ #
+ def __call__(self, RealValue=False, Depth=0):
+ if self._NoProcess:
+ return self._Expr
+
+ self._Depth = Depth
+
+ self._Expr = self._Expr.strip()
+ if RealValue and Depth == 0:
+ self._Token = self._Expr
+ if self.__IsNumberToken():
+ return self._Expr
+ Token = ''
+ try:
+ Token = self._GetToken()
+ except BadExpression:
+ pass
+ if isinstance(Token, type('')) and Token.startswith('{') and Token.endswith('}') and self._Idx >= self._Len:
+ return self._Expr
+
+ self._Idx = 0
+ self._Token = ''
+
+ Val = self._ConExpr()
+ RealVal = Val
+ if isinstance(Val, type('')):
+ if Val == 'L""':
+ Val = False
+ elif not Val:
+ Val = False
+ RealVal = '""'
+ elif not Val.startswith('L"') and not Val.startswith('{') and not Val.startswith("L'") and not Val.startswith("'"):
+ Val = True
+ RealVal = '"' + RealVal + '"'
+
+        # The expression has been parsed, but the end of the expression was not
+        # reached; the rest does not comply with the EBNF of <Expression>
+ if self._Idx != self._Len:
+ raise BadExpression(ERR_SNYTAX % self._Expr[self._Idx:])
+
+ if RealValue:
+ RetVal = str(RealVal)
+ elif Val:
+ RetVal = True
+ else:
+ RetVal = False
+
+ if self._WarnExcept:
+ self._WarnExcept.result = RetVal
+ raise self._WarnExcept
+ else:
+ return RetVal
+
+ # Template function to parse binary operators which have same precedence
+ # Expr [Operator Expr]*
+ def _ExprFuncTemplate(self, EvalFunc, OpSet):
+ Val = EvalFunc()
+ while self._IsOperator(OpSet):
+ Op = self._Token
+ if Op == '?':
+ Val2 = EvalFunc()
+ if self._IsOperator({':'}):
+ Val3 = EvalFunc()
+ if Val:
+ Val = Val2
+ else:
+ Val = Val3
+ continue
+ #
+ # PEP 238 -- Changing the Division Operator
+ # x/y to return a reasonable approximation of the mathematical result of the division ("true division")
+ # x//y to return the floor ("floor division")
+ #
+ if Op == '/':
+ Op = '//'
+ try:
+ Val = self.Eval(Op, Val, EvalFunc())
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ Val = Warn.result
+ return Val
+
+    # A [? B : C]*
+ def _ConExpr(self):
+ return self._ExprFuncTemplate(self._OrExpr, {'?', ':'})
+
+ # A [|| B]*
+ def _OrExpr(self):
+ return self._ExprFuncTemplate(self._AndExpr, {"OR", "or", "||"})
+
+ # A [&& B]*
+ def _AndExpr(self):
+ return self._ExprFuncTemplate(self._BitOr, {"AND", "and", "&&"})
+
+ # A [ | B]*
+ def _BitOr(self):
+ return self._ExprFuncTemplate(self._BitXor, {"|"})
+
+ # A [ ^ B]*
+ def _BitXor(self):
+ return self._ExprFuncTemplate(self._BitAnd, {"XOR", "xor", "^"})
+
+ # A [ & B]*
+ def _BitAnd(self):
+ return self._ExprFuncTemplate(self._EqExpr, {"&"})
+
+ # A [ == B]*
+ def _EqExpr(self):
+ Val = self._RelExpr()
+ while self._IsOperator({"==", "!=", "EQ", "NE", "IN", "in", "!", "NOT", "not"}):
+ Op = self._Token
+ if Op in {"!", "NOT", "not"}:
+ if not self._IsOperator({"IN", "in"}):
+ raise BadExpression(ERR_REL_NOT_IN)
+ Op += ' ' + self._Token
+ try:
+ Val = self.Eval(Op, Val, self._RelExpr())
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ Val = Warn.result
+ return Val
+
+ # A [ > B]*
+ def _RelExpr(self):
+ return self._ExprFuncTemplate(self._ShiftExpr, {"<=", ">=", "<", ">", "LE", "GE", "LT", "GT"})
+
+ def _ShiftExpr(self):
+ return self._ExprFuncTemplate(self._AddExpr, {"<<", ">>"})
+
+ # A [ + B]*
+ def _AddExpr(self):
+ return self._ExprFuncTemplate(self._MulExpr, {"+", "-"})
+
+ # A [ * B]*
+ def _MulExpr(self):
+ return self._ExprFuncTemplate(self._UnaryExpr, {TAB_STAR, "/", "%"})
+
+ # [!]*A
+ def _UnaryExpr(self):
+ if self._IsOperator({"!", "NOT", "not"}):
+ Val = self._UnaryExpr()
+ try:
+ return self.Eval('not', Val)
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ return Warn.result
+ if self._IsOperator({"~"}):
+ Val = self._UnaryExpr()
+ try:
+ return self.Eval('~', Val)
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ return Warn.result
+ return self._IdenExpr()
+
+ # Parse identifier or encapsulated expression
+ def _IdenExpr(self):
+ Tk = self._GetToken()
+ if Tk == '(':
+ Val = self._ConExpr()
+ try:
+ # _GetToken may also raise BadExpression
+ if self._GetToken() != ')':
+ raise BadExpression(ERR_MATCH)
+ except BadExpression:
+ raise BadExpression(ERR_MATCH)
+ return Val
+ return Tk
+
+ # Skip whitespace or tab
+ def __SkipWS(self):
+ for Char in self._Expr[self._Idx:]:
+ if Char not in ' \t':
+ break
+ self._Idx += 1
+
+    # Check for a quoted string token, or try to convert the token to a number
+ def __IsNumberToken(self):
+ Radix = 10
+ if self._Token.lower()[0:2] == '0x' and len(self._Token) > 2:
+ Radix = 16
+ if self._Token.startswith('"') or self._Token.startswith('L"'):
+ Flag = 0
+ for Index in range(len(self._Token)):
+ if self._Token[Index] in {'"'}:
+ if self._Token[Index - 1] == '\\':
+ continue
+ Flag += 1
+ if Flag == 2 and self._Token.endswith('"'):
+ return True
+ if self._Token.startswith("'") or self._Token.startswith("L'"):
+ Flag = 0
+ for Index in range(len(self._Token)):
+ if self._Token[Index] in {"'"}:
+ if self._Token[Index - 1] == '\\':
+ continue
+ Flag += 1
+ if Flag == 2 and self._Token.endswith("'"):
+ return True
+ try:
+ self._Token = int(self._Token, Radix)
+ return True
+ except ValueError:
+ return False
+ except TypeError:
+ return False
+
+ # Parse array: {...}
+ def __GetArray(self):
+ Token = '{'
+ self._Idx += 1
+ self.__GetNList(True)
+ Token += self._LiteralToken
+ if self._Idx >= self._Len or self._Expr[self._Idx] != '}':
+ raise BadExpression(ERR_ARRAY_TOKEN % Token)
+ Token += '}'
+
+ # All whitespace and tabs in array are already stripped.
+ IsArray = IsGuid = False
+ if len(Token.split(',')) == 11 and len(Token.split(',{')) == 2 \
+ and len(Token.split('},')) == 1:
+ HexLen = [11, 6, 6, 5, 4, 4, 4, 4, 4, 4, 6]
+ HexList= Token.split(',')
+ if HexList[3].startswith('{') and \
+ not [Index for Index, Hex in enumerate(HexList) if len(Hex) > HexLen[Index]]:
+ IsGuid = True
+ if Token.lstrip('{').rstrip('}').find('{') == -1:
+ if not [Hex for Hex in Token.lstrip('{').rstrip('}').split(',') if len(Hex) > 4]:
+ IsArray = True
+ if not IsArray and not IsGuid:
+ raise BadExpression(ERR_ARRAY_TOKEN % Token)
+ self._Idx += 1
+ self._Token = self._LiteralToken = Token
+ return self._Token
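+    # Illustrative inputs (a sketch): a registry-format GUID written as a C
+    # initializer, e.g. {0x12345678,0x1234,0x5678,{0x90,0xAB,0xCD,0xEF,0x12,
+    # 0x34,0x56,0x78}}, is accepted as a GUID; a flat byte list such as
+    # {0x01,0x02,0x03} is accepted as an array; anything else raises
+    # BadExpression.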
+
+ # Parse string, the format must be: "..."
+ def __GetString(self):
+ Idx = self._Idx
+
+ # Skip left quote
+ self._Idx += 1
+
+ # Replace escape \\\", \"
+ if self._Expr[Idx] == '"':
+ Expr = self._Expr[self._Idx:].replace('\\\\', '//').replace('\\\"', '\\\'')
+ for Ch in Expr:
+ self._Idx += 1
+ if Ch == '"':
+ break
+ self._Token = self._LiteralToken = self._Expr[Idx:self._Idx]
+ if not self._Token.endswith('"'):
+ raise BadExpression(ERR_STRING_TOKEN % self._Token)
+ #Replace escape \\\', \'
+ elif self._Expr[Idx] == "'":
+ Expr = self._Expr[self._Idx:].replace('\\\\', '//').replace("\\\'", "\\\"")
+ for Ch in Expr:
+ self._Idx += 1
+ if Ch == "'":
+ break
+ self._Token = self._LiteralToken = self._Expr[Idx:self._Idx]
+ if not self._Token.endswith("'"):
+ raise BadExpression(ERR_STRING_TOKEN % self._Token)
+ self._Token = self._Token[1:-1]
+ return self._Token
+
+    # Get a token comprised of alphanumeric characters, underscores or dots (used by PCDs)
+    # @param IsAlphaOp: Indicate if parsing a general token or a script operator (EQ, NE...)
+ def __GetIdToken(self, IsAlphaOp = False):
+ IdToken = ''
+ for Ch in self._Expr[self._Idx:]:
+ if not self.__IsIdChar(Ch) or ('?' in self._Expr and Ch == ':'):
+ break
+ self._Idx += 1
+ IdToken += Ch
+
+ self._Token = self._LiteralToken = IdToken
+ if not IsAlphaOp:
+ self.__ResolveToken()
+ return self._Token
+
+ # Try to resolve token
+ def __ResolveToken(self):
+ if not self._Token:
+ raise BadExpression(ERR_EMPTY_TOKEN)
+
+ # PCD token
+ if PcdPattern.match(self._Token):
+ if self._Token not in self._Symb:
+ Ex = BadExpression(ERR_PCD_RESOLVE % self._Token)
+ Ex.Pcd = self._Token
+ raise Ex
+ self._Token = ValueExpression(self._Symb[self._Token], self._Symb)(True, self._Depth+1)
+ if not isinstance(self._Token, type('')):
+ self._LiteralToken = hex(self._Token)
+ return
+
+ if self._Token.startswith('"'):
+ self._Token = self._Token[1:-1]
+ elif self._Token in {"FALSE", "false", "False"}:
+ self._Token = False
+ elif self._Token in {"TRUE", "true", "True"}:
+ self._Token = True
+ else:
+ self.__IsNumberToken()
+
+ def __GetNList(self, InArray=False):
+ self._GetSingleToken()
+ if not self.__IsHexLiteral():
+ if InArray:
+ raise BadExpression(ERR_ARRAY_ELE % self._Token)
+ return self._Token
+
+ self.__SkipWS()
+ Expr = self._Expr[self._Idx:]
+ if not Expr.startswith(','):
+ return self._Token
+
+ NList = self._LiteralToken
+ while Expr.startswith(','):
+ NList += ','
+ self._Idx += 1
+ self.__SkipWS()
+ self._GetSingleToken()
+ if not self.__IsHexLiteral():
+ raise BadExpression(ERR_ARRAY_ELE % self._Token)
+ NList += self._LiteralToken
+ self.__SkipWS()
+ Expr = self._Expr[self._Idx:]
+ self._Token = self._LiteralToken = NList
+ return self._Token
+
+ def __IsHexLiteral(self):
+ if self._LiteralToken.startswith('{') and \
+ self._LiteralToken.endswith('}'):
+ return True
+
+ if gHexPattern.match(self._LiteralToken):
+ Token = self._LiteralToken[2:]
+ if not Token:
+ self._LiteralToken = '0x0'
+ else:
+ self._LiteralToken = '0x' + Token
+ return True
+ return False
+
+ def _GetToken(self):
+ return self.__GetNList()
+
+ @staticmethod
+ def __IsIdChar(Ch):
+ return Ch in '._:' or Ch.isalnum()
+
+ # Parse operand
+ def _GetSingleToken(self):
+ self.__SkipWS()
+ Expr = self._Expr[self._Idx:]
+ if Expr.startswith('L"'):
+ # Skip L
+ self._Idx += 1
+ UStr = self.__GetString()
+ self._Token = 'L"' + UStr + '"'
+ return self._Token
+ elif Expr.startswith("L'"):
+ # Skip L
+ self._Idx += 1
+ UStr = self.__GetString()
+ self._Token = "L'" + UStr + "'"
+ return self._Token
+ elif Expr.startswith("'"):
+ UStr = self.__GetString()
+ self._Token = "'" + UStr + "'"
+ return self._Token
+ elif Expr.startswith('UINT'):
+            Re = re.compile(r'(?:UINT8|UINT16|UINT32|UINT64)\((.+)\)')
+ try:
+ RetValue = Re.search(Expr).group(1)
+ except:
+ raise BadExpression('Invalid Expression %s' % Expr)
+ Idx = self._Idx
+ for Ch in Expr:
+ self._Idx += 1
+ if Ch == '(':
+ Prefix = self._Expr[Idx:self._Idx - 1]
+ Idx = self._Idx
+ if Ch == ')':
+ TmpValue = self._Expr[Idx :self._Idx - 1]
+ TmpValue = ValueExpression(TmpValue)(True)
+ TmpValue = '0x%x' % int(TmpValue) if not isinstance(TmpValue, type('')) else TmpValue
+ break
+ self._Token, Size = ParseFieldValue(Prefix + '(' + TmpValue + ')')
+ return self._Token
+
+ self._Token = ''
+ if Expr:
+ Ch = Expr[0]
+ Match = gGuidPattern.match(Expr)
+ if Match and not Expr[Match.end():Match.end()+1].isalnum() \
+ and Expr[Match.end():Match.end()+1] != '_':
+ self._Idx += Match.end()
+ self._Token = ValueExpression(GuidStringToGuidStructureString(Expr[0:Match.end()]))(True, self._Depth+1)
+ return self._Token
+ elif self.__IsIdChar(Ch):
+ return self.__GetIdToken()
+ elif Ch == '"':
+ return self.__GetString()
+ elif Ch == '{':
+ return self.__GetArray()
+ elif Ch == '(' or Ch == ')':
+ self._Idx += 1
+ self._Token = Ch
+ return self._Token
+
+ raise BadExpression(ERR_VALID_TOKEN % Expr)
+
+ # Parse operator
+ def _GetOperator(self):
+ self.__SkipWS()
+ LegalOpLst = ['&&', '||', '!=', '==', '>=', '<='] + self.NonLetterOpLst + ['?', ':']
+
+ self._Token = ''
+ Expr = self._Expr[self._Idx:]
+
+ # Reach end of expression
+ if not Expr:
+ return ''
+
+ # Script operator: LT, GT, LE, GE, EQ, NE, and, or, xor, not
+ if Expr[0].isalpha():
+ return self.__GetIdToken(True)
+
+ # Start to get regular operator: +, -, <, > ...
+ if Expr[0] not in self.NonLetterOpLst:
+ return ''
+
+ OpToken = ''
+ for Ch in Expr:
+ if Ch in self.NonLetterOpLst:
+ if Ch in ['!', '~'] and OpToken:
+ break
+ self._Idx += 1
+ OpToken += Ch
+ else:
+ break
+
+ if OpToken not in LegalOpLst:
+ raise BadExpression(ERR_OPERATOR_UNSUPPORT % OpToken)
+ self._Token = OpToken
+ return OpToken
+
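+# Illustrative ValueExpression usage (a sketch; the symbol-table entry is an
+# assumed example, not a real PCD):
+#   VE = ValueExpression('GuidToken.PcdExample + 1', {'GuidToken.PcdExample': '2'})
+#   VE(True)   # resolves the PCD through the symbol table, then evaluates
+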
+class ValueExpressionEx(ValueExpression):
+ def __init__(self, PcdValue, PcdType, SymbolTable={}):
+ ValueExpression.__init__(self, PcdValue, SymbolTable)
+ self.PcdValue = PcdValue
+ self.PcdType = PcdType
+
+ def __call__(self, RealValue=False, Depth=0):
+ PcdValue = self.PcdValue
+ if "{CODE(" not in PcdValue:
+ try:
+ PcdValue = ValueExpression.__call__(self, RealValue, Depth)
+ if self.PcdType == TAB_VOID and (PcdValue.startswith("'") or PcdValue.startswith("L'")):
+ PcdValue, Size = ParseFieldValue(PcdValue)
+ PcdValueList = []
+ for I in range(Size):
+ PcdValueList.append('0x%02X'%(PcdValue & 0xff))
+ PcdValue = PcdValue >> 8
+ PcdValue = '{' + ','.join(PcdValueList) + '}'
+ elif self.PcdType in TAB_PCD_NUMERIC_TYPES and (PcdValue.startswith("'") or \
+ PcdValue.startswith('"') or PcdValue.startswith("L'") or PcdValue.startswith('L"') or PcdValue.startswith('{')):
+ raise BadExpression
+ except WrnExpression as Value:
+ PcdValue = Value.result
+ except BadExpression as Value:
+ if self.PcdType in TAB_PCD_NUMERIC_TYPES:
+ PcdValue = PcdValue.strip()
+ if PcdValue.startswith('{') and PcdValue.endswith('}'):
+ PcdValue = SplitPcdValueString(PcdValue[1:-1])
+ if isinstance(PcdValue, type([])):
+ TmpValue = 0
+ Size = 0
+ ValueType = ''
+ for Item in PcdValue:
+ Item = Item.strip()
+ if Item.startswith(TAB_UINT8):
+ ItemSize = 1
+ ValueType = TAB_UINT8
+ elif Item.startswith(TAB_UINT16):
+ ItemSize = 2
+ ValueType = TAB_UINT16
+ elif Item.startswith(TAB_UINT32):
+ ItemSize = 4
+ ValueType = TAB_UINT32
+ elif Item.startswith(TAB_UINT64):
+ ItemSize = 8
+ ValueType = TAB_UINT64
+ elif Item[0] in {'"', "'", 'L'}:
+ ItemSize = 0
+ ValueType = TAB_VOID
+ else:
+ ItemSize = 0
+ ValueType = TAB_UINT8
+ Item = ValueExpressionEx(Item, ValueType, self._Symb)(True)
+ if ItemSize == 0:
+ try:
+ tmpValue = int(Item, 0)
+ if tmpValue > 255:
+                                        raise BadExpression("Byte array number %s should be no larger than 0xFF." % Item)
+ except BadExpression as Value:
+ raise BadExpression(Value)
+ except ValueError:
+ pass
+ ItemValue, ItemSize = ParseFieldValue(Item)
+ else:
+ ItemValue = ParseFieldValue(Item)[0]
+
+ if isinstance(ItemValue, type('')):
+ ItemValue = int(ItemValue, 0)
+
+ TmpValue = (ItemValue << (Size * 8)) | TmpValue
+ Size = Size + ItemSize
+ else:
+ try:
+ TmpValue, Size = ParseFieldValue(PcdValue)
+ except BadExpression as Value:
+ raise BadExpression("Type: %s, Value: %s, %s" % (self.PcdType, PcdValue, Value))
+ if isinstance(TmpValue, type('')):
+ try:
+ TmpValue = int(TmpValue)
+ except:
+ raise BadExpression(Value)
+ else:
+ PcdValue = '0x%0{}X'.format(Size) % (TmpValue)
+ if TmpValue < 0:
+ raise BadExpression('Type %s PCD Value is negative' % self.PcdType)
+ if self.PcdType == TAB_UINT8 and Size > 1:
+ raise BadExpression('Type %s PCD Value Size is Larger than 1 byte' % self.PcdType)
+ if self.PcdType == TAB_UINT16 and Size > 2:
+ raise BadExpression('Type %s PCD Value Size is Larger than 2 byte' % self.PcdType)
+ if self.PcdType == TAB_UINT32 and Size > 4:
+ raise BadExpression('Type %s PCD Value Size is Larger than 4 byte' % self.PcdType)
+ if self.PcdType == TAB_UINT64 and Size > 8:
+ raise BadExpression('Type %s PCD Value Size is Larger than 8 byte' % self.PcdType)
+ else:
+ try:
+ TmpValue = int(PcdValue)
+ TmpList = []
+ if TmpValue.bit_length() == 0:
+ PcdValue = '{0x00}'
+ else:
+ for I in range((TmpValue.bit_length() + 7) // 8):
+ TmpList.append('0x%02x' % ((TmpValue >> I * 8) & 0xff))
+ PcdValue = '{' + ', '.join(TmpList) + '}'
+ except:
+ if PcdValue.strip().startswith('{'):
+ PcdValueList = SplitPcdValueString(PcdValue.strip()[1:-1])
+ LabelDict = {}
+ NewPcdValueList = []
+ LabelOffset = 0
+ for Item in PcdValueList:
+ # compute byte offset of every LABEL
+ LabelList = _ReLabel.findall(Item)
+ Item = _ReLabel.sub('', Item)
+ Item = Item.strip()
+ if LabelList:
+ for Label in LabelList:
+ if not IsValidCName(Label):
+ raise BadExpression('%s is not a valid c variable name' % Label)
+ if Label not in LabelDict:
+ LabelDict[Label] = str(LabelOffset)
+ if Item.startswith(TAB_UINT8):
+ LabelOffset = LabelOffset + 1
+ elif Item.startswith(TAB_UINT16):
+ LabelOffset = LabelOffset + 2
+ elif Item.startswith(TAB_UINT32):
+ LabelOffset = LabelOffset + 4
+ elif Item.startswith(TAB_UINT64):
+ LabelOffset = LabelOffset + 8
+ else:
+ try:
+ ItemValue, ItemSize = ParseFieldValue(Item)
+ LabelOffset = LabelOffset + ItemSize
+ except:
+ LabelOffset = LabelOffset + 1
+
+ for Item in PcdValueList:
+ # for LABEL parse
+ Item = Item.strip()
+ try:
+ Item = _ReLabel.sub('', Item)
+ except:
+ pass
+ try:
+ OffsetList = _ReOffset.findall(Item)
+ except:
+ pass
+ # replace each offset, except errors
+ for Offset in OffsetList:
+ try:
+ Item = Item.replace('OFFSET_OF({})'.format(Offset), LabelDict[Offset])
+ except:
+ raise BadExpression('%s not defined' % Offset)
+
+ NewPcdValueList.append(Item)
+
+ AllPcdValueList = []
+ for Item in NewPcdValueList:
+ Size = 0
+ ValueStr = ''
+ TokenSpaceGuidName = ''
+ if Item.startswith(TAB_GUID) and Item.endswith(')'):
+ try:
+                                        TokenSpaceGuidName = re.search(r'GUID\((\w+)\)', Item).group(1)
+ except:
+ pass
+ if TokenSpaceGuidName and TokenSpaceGuidName in self._Symb:
+ Item = 'GUID(' + self._Symb[TokenSpaceGuidName] + ')'
+ elif TokenSpaceGuidName:
+ raise BadExpression('%s not found in DEC file' % TokenSpaceGuidName)
+ Item, Size = ParseFieldValue(Item)
+ for Index in range(0, Size):
+ ValueStr = '0x%02X' % (int(Item) & 255)
+ Item >>= 8
+ AllPcdValueList.append(ValueStr)
+ continue
+ elif Item.startswith('DEVICE_PATH') and Item.endswith(')'):
+ Item, Size = ParseFieldValue(Item)
+ AllPcdValueList.append(Item[1:-1])
+ continue
+ else:
+ ValueType = ""
+ if Item.startswith(TAB_UINT8):
+ ItemSize = 1
+ ValueType = TAB_UINT8
+ elif Item.startswith(TAB_UINT16):
+ ItemSize = 2
+ ValueType = TAB_UINT16
+ elif Item.startswith(TAB_UINT32):
+ ItemSize = 4
+ ValueType = TAB_UINT32
+ elif Item.startswith(TAB_UINT64):
+ ItemSize = 8
+ ValueType = TAB_UINT64
+ else:
+ ItemSize = 0
+ if ValueType:
+ TmpValue = ValueExpressionEx(Item, ValueType, self._Symb)(True)
+ else:
+ TmpValue = ValueExpressionEx(Item, self.PcdType, self._Symb)(True)
+ Item = '0x%x' % TmpValue if not isinstance(TmpValue, type('')) else TmpValue
+ if ItemSize == 0:
+ ItemValue, ItemSize = ParseFieldValue(Item)
+ if Item[0] not in {'"', 'L', '{'} and ItemSize > 1:
+                                            raise BadExpression("Byte array number %s should be no larger than 0xFF." % Item)
+ else:
+ ItemValue = ParseFieldValue(Item)[0]
+ for I in range(0, ItemSize):
+ ValueStr = '0x%02X' % (int(ItemValue) & 255)
+ ItemValue >>= 8
+ AllPcdValueList.append(ValueStr)
+ Size += ItemSize
+
+ if Size > 0:
+ PcdValue = '{' + ','.join(AllPcdValueList) + '}'
+ else:
+ raise BadExpression("Type: %s, Value: %s, %s"%(self.PcdType, PcdValue, Value))
+
+ if PcdValue == 'True':
+ PcdValue = '1'
+ if PcdValue == 'False':
+ PcdValue = '0'
+
+ if RealValue:
+ return PcdValue
+
+if __name__ == '__main__':
+    # interactive test loop; Python 3 input() replaces the removed raw_input()
+    while True:
+        Expr = input('Input expr: ')
+        if Expr in ('q', 'Q'):
+            break
+        try:
+            print(ValueExpression(Expr)(True))
+            print(ValueExpression(Expr)(False))
+ except WrnExpression as Ex:
+ print(Ex.result)
+ print(str(Ex))
+ except Exception as Ex:
+ print(str(Ex))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/GlobalData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/GlobalData.py
new file mode 100644
index 00000000..13248d94
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/GlobalData.py
@@ -0,0 +1,124 @@
+## @file
+# This file is used to define common global data shared by the build tools
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+import re
+
+gIsWindows = None
+gWorkspace = "."
+gOptions = None
+gCaseInsensitive = False
+gAllFiles = None
+gCommand = None
+gSKUID_CMD = None
+
+gGlobalDefines = {}
+gPlatformDefines = {}
+# PCD name/value pairs for FixedAtBuild and FeatureFlag PCDs
+gPlatformPcds = {}
+# PCDs whose types are not FixedAtBuild or FeatureFlag
+gPlatformOtherPcds = {}
+gActivePlatform = None
+gCommandLineDefines = {}
+gEdkGlobal = {}
+gCommandMaxLength = 4096
+# for debug tracing when a problem occurs
+gProcessingFile = ''
+gBuildingModule = ''
+gSkuids = []
+gDefaultStores = []
+gGuidDict = {}
+
+# Definition of a MACRO name; used to create the regular expressions below.
+_MacroNamePattern = "[A-Z][A-Z0-9_]*"
+
+## Regular expression for matching macro used in DSC/DEC/INF file inclusion
+gMacroRefPattern = re.compile(r"\$\(({})\)".format(_MacroNamePattern), re.UNICODE)
+gMacroDefPattern = re.compile("^(DEFINE|EDK_GLOBAL)[ \t]+")
+gMacroNamePattern = re.compile("^{}$".format(_MacroNamePattern))
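+# Illustrative (a sketch): gMacroRefPattern.findall("$(WORKSPACE)/Build")
+# returns ['WORKSPACE']; per _MacroNamePattern, a macro name must start with
+# an uppercase letter.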
+
+# Definition of a GUID; used to create the regular expressions below.
+_HexChar = r"[0-9a-fA-F]"
+_GuidPattern = r"{Hex}{{8}}-{Hex}{{4}}-{Hex}{{4}}-{Hex}{{4}}-{Hex}{{12}}".format(Hex=_HexChar)
+
+## Regular expressions for GUID matching
+gGuidPattern = re.compile(r'{}'.format(_GuidPattern))
+gGuidPatternEnd = re.compile(r'{}$'.format(_GuidPattern))
+
+## Regular expressions for HEX matching
+g4HexChar = re.compile(r'{}{{4}}'.format(_HexChar))
+gHexPattern = re.compile(r'0[xX]{}+'.format(_HexChar))
+gHexPatternAll = re.compile(r'0[xX]{}+$'.format(_HexChar))
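+# Illustrative (a sketch): gHexPattern.match accepts a token that begins with
+# a 0x/0X hex literal, while gHexPatternAll requires the entire string to be
+# the literal because of the trailing '$' anchor.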
+
+## Regular expressions for string identifier checking
+gIdentifierPattern = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$', re.UNICODE)
+## Regular expression for GUID c structure format
+_GuidCFormatPattern = r"{{\s*0[xX]{Hex}{{1,8}}\s*,\s*0[xX]{Hex}{{1,4}}\s*,\s*0[xX]{Hex}{{1,4}}" \
+ r"\s*,\s*{{\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}" \
+ r"\s*,\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}" \
+ r"\s*,\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}" \
+ r"\s*,\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}\s*}}\s*}}".format(Hex=_HexChar)
+gGuidCFormatPattern = re.compile(r"{}".format(_GuidCFormatPattern))
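+# Illustrative (a sketch): gGuidCFormatPattern recognizes the C structure form
+# {0x12345678, 0x1234, 0x5678, {0x90, 0xAB, 0xCD, 0xEF, 0x12, 0x34, 0x56, 0x78}}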
+
+#
+# A global variable for whether current build in AutoGen phase or not.
+#
+gAutoGenPhase = False
+
+#
+# The Conf dir outside the workspace dir
+#
+gConfDirectory = ''
+gCmdConfDir = ''
+gBuildDirectory = ''
+#
+# The relative default database file path
+#
+gDatabasePath = ".cache/build.db"
+
+#
+# Build flag for binary build
+#
+gIgnoreSource = False
+
+#
+# FDF parser
+#
+gFdfParser = None
+
+BuildOptionPcd = []
+
+#
+# Mixed PCD name dict
+#
+MixedPcd = {}
+
+# Structure Pcd dict
+gStructurePcd = {}
+gPcdSkuOverrides={}
+# PCD names for the PCDs used in conditional directives
+gConditionalPcds = []
+
+gUseHashCache = None
+gBinCacheDest = None
+gBinCacheSource = None
+gPlatformHash = None
+gPlatformHashFile = None
+gPackageHash = None
+gPackageHashFile = None
+gModuleHashFile = None
+gCMakeHashFile = None
+gHashChainStatus = None
+gModulePreMakeCacheStatus = None
+gModuleMakeCacheStatus = None
+gFileHashDict = None
+gModuleAllCacheStatus = None
+gModuleCacheHit = None
+
+gEnableGenfdsMultiThread = True
+gSikpAutoGenCache = set()
+# Common lock for the file access in multiple process AutoGens
+file_lock = None
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOs.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOs.py
new file mode 100755
index 00000000..3b3f1444
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOs.py
@@ -0,0 +1,79 @@
+## @file
+# Override the built-in os module to provide support for long file paths
+#
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import absolute_import
+import os
+from . import LongFilePathOsPath
+from Common.LongFilePathSupport import LongFilePath
+import time
+
+path = LongFilePathOsPath
+
+def access(path, mode):
+ return os.access(LongFilePath(path), mode)
+
+def remove(path):
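+    # Retry for up to ~5 seconds to ride out transient failures (e.g. Windows
+    # sharing violations while another process still holds the file), then
+    # make one final attempt and let any remaining error propagate.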
+ Timeout = 0.0
+ while Timeout < 5.0:
+ try:
+ return os.remove(LongFilePath(path))
+ except:
+ time.sleep(0.1)
+ Timeout = Timeout + 0.1
+ return os.remove(LongFilePath(path))
+
+def removedirs(name):
+ return os.removedirs(LongFilePath(name))
+
+def rmdir(path):
+ return os.rmdir(LongFilePath(path))
+
+def mkdir(path):
+ return os.mkdir(LongFilePath(path))
+
+def makedirs(name, mode=0o777):
+ return os.makedirs(LongFilePath(name), mode)
+
+def rename(old, new):
+ return os.rename(LongFilePath(old), LongFilePath(new))
+
+def chdir(path):
+ return os.chdir(LongFilePath(path))
+
+def chmod(path, mode):
+ return os.chmod(LongFilePath(path), mode)
+
+def stat(path):
+ return os.stat(LongFilePath(path))
+
+def utime(path, times):
+ return os.utime(LongFilePath(path), times)
+
+def listdir(path):
+ List = []
+ uList = os.listdir(u"%s" % LongFilePath(path))
+ for Item in uList:
+ List.append(Item)
+ return List
+
+if hasattr(os, 'replace'):
+ def replace(src, dst):
+ return os.replace(LongFilePath(src), LongFilePath(dst))
+
+environ = os.environ
+getcwd = os.getcwd
+chdir = os.chdir
+walk = os.walk
+W_OK = os.W_OK
+F_OK = os.F_OK
+sep = os.sep
+linesep = os.linesep
+getenv = os.getenv
+pathsep = os.pathsep
+name = os.name
+SEEK_SET = os.SEEK_SET
+SEEK_END = os.SEEK_END
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOsPath.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOsPath.py
new file mode 100755
index 00000000..54d4dded
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathOsPath.py
@@ -0,0 +1,47 @@
+## @file
+# Override the built-in os.path module to provide support for long file paths
+#
+# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import os
+from Common.LongFilePathSupport import LongFilePath
+
+def isfile(path):
+ return os.path.isfile(LongFilePath(path))
+
+def isdir(path):
+ return os.path.isdir(LongFilePath(path))
+
+def exists(path):
+ return os.path.exists(LongFilePath(path))
+
+def getsize(filename):
+ return os.path.getsize(LongFilePath(filename))
+
+def getmtime(filename):
+ return os.path.getmtime(LongFilePath(filename))
+
+def getatime(filename):
+ return os.path.getatime(LongFilePath(filename))
+
+def getctime(filename):
+ return os.path.getctime(LongFilePath(filename))
+
+join = os.path.join
+splitext = os.path.splitext
+splitdrive = os.path.splitdrive
+split = os.path.split
+abspath = os.path.abspath
+basename = os.path.basename
+commonprefix = os.path.commonprefix
+sep = os.path.sep
+normpath = os.path.normpath
+normcase = os.path.normcase
+dirname = os.path.dirname
+islink = os.path.islink
+isabs = os.path.isabs
+realpath = os.path.realpath
+relpath = os.path.relpath
+pardir = os.path.pardir
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathSupport.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathSupport.py
new file mode 100755
index 00000000..193abd26
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/LongFilePathSupport.py
@@ -0,0 +1,45 @@
+## @file
+# Override the built-in open function to provide support for long file paths
+#
+# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import os
+import platform
+import shutil
+import codecs
+
+##
+# LongFilePath
+# Convert a file path to a long file path
+#
+def LongFilePath(FileName):
+ FileName = os.path.normpath(FileName)
+ if platform.system() == 'Windows':
+ if FileName.startswith('\\\\?\\'):
+ return FileName
+ if FileName.startswith('\\\\'):
+ return '\\\\?\\UNC\\' + FileName[2:]
+ if os.path.isabs(FileName):
+ return '\\\\?\\' + FileName
+ return FileName
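+# Illustrative results on Windows (a sketch):
+#   C:\very\long\path  ->  \\?\C:\very\long\path
+#   \\server\share\f   ->  \\?\UNC\server\share\f
+# Relative paths, already-prefixed paths and non-Windows platforms are
+# returned unchanged (after normpath).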
+
+##
+# OpenLongFilePath
+# Wrap open() to support opening a long file path
+#
+def OpenLongFilePath(FileName, Mode='r', Buffer=-1):
+ return open(LongFilePath(FileName), Mode, Buffer)
+
+def CodecOpenLongFilePath(Filename, Mode='rb', Encoding=None, Errors='strict', Buffering=1):
+ return codecs.open(LongFilePath(Filename), Mode, Encoding, Errors, Buffering)
+
+##
+# CopyLongFilePath
+# Wrap file copying to support long source and destination paths
+#
+def CopyLongFilePath(src, dst):
+ with open(LongFilePath(src), 'rb') as fsrc:
+ with open(LongFilePath(dst), 'wb') as fdst:
+ shutil.copyfileobj(fsrc, fdst)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Misc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Misc.py
new file mode 100755
index 00000000..980fa839
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Misc.py
@@ -0,0 +1,1929 @@
+## @file
+# Common routines used by all tools
+#
+# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+
+import sys
+import string
+import threading
+import time
+import re
+import pickle
+import array
+import shutil
+import filecmp
+from random import sample
+from struct import pack
+import uuid
+import subprocess
+import tempfile
+from collections import OrderedDict
+
+import Common.LongFilePathOs as os
+from Common import EdkLogger as EdkLogger
+from Common import GlobalData as GlobalData
+from Common.DataType import *
+from Common.BuildToolError import *
+from CommonDataClass.DataClass import *
+from Common.Parsing import GetSplitValueList
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.LongFilePathSupport import CopyLongFilePath as CopyLong
+from Common.LongFilePathSupport import LongFilePath as LongFilePath
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from CommonDataClass.Exceptions import BadExpression
+from Common.caching import cached_property
+import struct
+
+ArrayIndex = re.compile(r"\[\s*[0-9a-fA-FxX]*\s*\]")
+## Regular expression used to find placeholders in a string template
+gPlaceholderPattern = re.compile(r"\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)
+
+## Regular expressions for map file processing
+startPatternGeneral = re.compile("^Start[' ']+Length[' ']+Name[' ']+Class")
+addressPatternGeneral = re.compile(r"^Address[' ']+Publics by Value[' ']+Rva\+Base")
+valuePatternGcc = re.compile(r'^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$')
+pcdPatternGcc = re.compile(r'^([\da-fA-Fx]+) +([\da-fA-Fx]+)')
+secReGeneral = re.compile(r'^([\da-fA-F]+):([\da-fA-F]+) +([\da-fA-F]+)[Hh]? +([.\w\$]+) +(\w+)', re.UNICODE)
+
+StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_]*$')
+
+## Dictionary used to store dependencies of files
+gDependencyDatabase = {} # arch : {file path : [dependent files list]}
+
+#
+# If a module is built more than once with different PCDs or library classes,
+# a temporary INF file with the same content is created; the temporary file is
+# removed when the build exits.
+#
+_TempInfs = []
+
+def GetVariableOffset(mapfilepath, efifilepath, varnames):
+ """ Parse map file to get variable offset in current EFI file
+    @param mapfilepath Map file absolute path
+    @param efifilepath EFI binary file full path
+    @param varnames iterable container whose elements are variable names to be searched
+
+    @return List whose elements are tuples of (variable name, raw offset)
+ """
+ lines = []
+ try:
+ f = open(mapfilepath, 'r')
+ lines = f.readlines()
+ f.close()
+ except:
+ return None
+
+ if len(lines) == 0: return None
+ firstline = lines[0].strip()
+    if re.match(r'^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', firstline):
+ return _parseForXcodeAndClang9(lines, efifilepath, varnames)
+ if (firstline.startswith("Archive member included ") and
+ firstline.endswith(" file (symbol)")):
+ return _parseForGCC(lines, efifilepath, varnames)
+ if firstline.startswith("# Path:"):
+ return _parseForXcodeAndClang9(lines, efifilepath, varnames)
+ return _parseGeneral(lines, efifilepath, varnames)
+
+def _parseForXcodeAndClang9(lines, efifilepath, varnames):
+ status = 0
+ ret = []
+ for line in lines:
+ line = line.strip()
+        if status == 0 and (re.match(r'^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', line) \
+ or line == "# Symbols:"):
+ status = 1
+ continue
+ if status == 1 and len(line) != 0:
+ for varname in varnames:
+ if varname in line:
+ # cannot pregenerate this RegEx since it uses varname from varnames.
+                m = re.match(r'^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line)
+ if m is not None:
+ ret.append((varname, m.group(1)))
+ return ret
+
+def _parseForGCC(lines, efifilepath, varnames):
+ """ Parse map file generated by GCC linker """
+ status = 0
+ sections = []
+ varoffset = []
+ for index, line in enumerate(lines):
+ line = line.strip()
+        # state machine transition
+ if status == 0 and line == "Memory Configuration":
+ status = 1
+ continue
+ elif status == 1 and line == 'Linker script and memory map':
+ status = 2
+ continue
+        elif status == 2 and line == 'START GROUP':
+ status = 3
+ continue
+
+ # status handler
+ if status == 3:
+ m = valuePatternGcc.match(line)
+ if m is not None:
+ sections.append(m.groups(0))
+ for varname in varnames:
+ Str = ''
+ m = re.match("^.data.(%s)" % varname, line)
+ if m is not None:
+ m = re.match(".data.(%s)$" % varname, line)
+ if m is not None:
+ Str = lines[index + 1]
+ else:
+ Str = line[len(".data.%s" % varname):]
+ if Str:
+ m = pcdPatternGcc.match(Str.strip())
+ if m is not None:
+ varoffset.append((varname, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
+
+ if not varoffset:
+ return []
+ # get section information from efi file
+ efisecs = PeImageClass(efifilepath).SectionHeaderList
+ if efisecs is None or len(efisecs) == 0:
+ return []
+ #redirection
+ redirection = 0
+ for efisec in efisecs:
+ for section in sections:
+ if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text':
+ redirection = int(section[1], 16) - efisec[1]
+
+ ret = []
+ for var in varoffset:
+ for efisec in efisecs:
+ if var[1] >= efisec[1] and var[1] < efisec[1]+efisec[3]:
+ ret.append((var[0], hex(efisec[2] + var[1] - efisec[1] - redirection)))
+ return ret
+
+def _parseGeneral(lines, efifilepath, varnames):
+ status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table
+ secs = [] # key = section name
+ varoffset = []
+    symRe = re.compile(r'^([\da-fA-F]+):([\da-fA-F]+) +([\.:\\\w\?@\$-]+) +([\da-fA-F]+)', re.UNICODE)
+
+ for line in lines:
+ line = line.strip()
+ if startPatternGeneral.match(line):
+ status = 1
+ continue
+ if addressPatternGeneral.match(line):
+ status = 2
+ continue
+ if line.startswith("entry point at"):
+ status = 3
+ continue
+ if status == 1 and len(line) != 0:
+ m = secReGeneral.match(line)
+            assert m is not None, "Failed to parse the section in map file, line is %s" % line
+ sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
+ secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
+ if status == 2 and len(line) != 0:
+ for varname in varnames:
+ m = symRe.match(line)
+                assert m is not None, "Failed to parse the symbol in map file, line is %s" % line
+ sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
+ sec_no = int(sec_no, 16)
+ sym_offset = int(sym_offset, 16)
+ vir_addr = int(vir_addr, 16)
+ # cannot pregenerate this RegEx since it uses varname from varnames.
+                m2 = re.match(r'^[_]*(%s)' % varname, sym_name)
+ if m2 is not None:
+                    # found a binary PCD entry in the map file
+ for sec in secs:
+ if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
+ varoffset.append([varname, sec[3], sym_offset, vir_addr, sec_no])
+
+ if not varoffset: return []
+
+ # get section information from efi file
+ efisecs = PeImageClass(efifilepath).SectionHeaderList
+ if efisecs is None or len(efisecs) == 0:
+ return []
+
+ ret = []
+ for var in varoffset:
+ index = 0
+ for efisec in efisecs:
+ index = index + 1
+ if var[1].strip() == efisec[0].strip():
+ ret.append((var[0], hex(efisec[2] + var[2])))
+ elif var[4] == index:
+ ret.append((var[0], hex(efisec[2] + var[2])))
+
+ return ret
+
+## Routine to process duplicated INF
+#
+# This function is called by following two cases:
+# Case 1 in DSC:
+# [components.arch]
+# Pkg/module/module.inf
+# Pkg/module/module.inf {
+# <Defines>
+# FILE_GUID = 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836
+# }
+# Case 2 in FDF:
+# INF Pkg/module/module.inf
+# INF FILE_GUID = 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836 Pkg/module/module.inf
+#
+# This function copies Pkg/module/module.inf to
+# Conf/.cache/0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf
+#
+# @param Path Original PathClass object
+# @param BaseName New file base name
+#
+# @retval return the new PathClass object
+#
+def ProcessDuplicatedInf(Path, BaseName, Workspace):
+ Filename = os.path.split(Path.File)[1]
+ if '.' in Filename:
+ Filename = BaseName + Path.BaseName + Filename[Filename.rfind('.'):]
+ else:
+ Filename = BaseName + Path.BaseName
+
+ DbDir = os.path.split(GlobalData.gDatabasePath)[0]
+
+ #
+    # A temporary INF is copied to the database path, which must have write permission.
+    # The temporary file will be removed at the end of the build.
+    # In case of a name conflict, the file name is
+ # FILE_GUIDBaseName (0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf)
+ #
+    TempFullPath = os.path.join(DbDir, Filename)
+ RtPath = PathClass(Path.File, Workspace)
+ #
+ # Modify the full path to temporary path, keep other unchanged
+ #
+ # To build same module more than once, the module path with FILE_GUID overridden has
+ # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
+ # in DSC which is used as relative path by C files and other files in INF.
+ # A trick was used: all module paths are PathClass instances, after the initialization
+ # of PathClass, the PathClass.Path is overridden by the temporary INF path.
+ #
+ # The reason for creating a temporary INF is:
+ # Platform.Modules which is the base to create ModuleAutoGen objects is a dictionary,
+ # the key is the full path of INF, the value is an object to save overridden library instances, PCDs.
+ # A different key for the same module is needed to create different output directory,
+ # retrieve overridden PCDs, library instances.
+ #
+ # The BaseName is the FILE_GUID which is also the output directory name.
+ #
+ #
+ RtPath.Path = TempFullPath
+ RtPath.BaseName = BaseName
+ RtPath.OriginalPath = Path
+ #
+ # If file exists, compare contents
+ #
+ if os.path.exists(TempFullPath):
+ with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
+ if f1.read() == f2.read():
+ return RtPath
+ _TempInfs.append(TempFullPath)
+ shutil.copy2(str(Path), TempFullPath)
+ return RtPath
+
+## Remove temporary created INFs whose paths were saved in _TempInfs
+#
+def ClearDuplicatedInf():
+ while _TempInfs:
+ File = _TempInfs.pop()
+ if os.path.exists(File):
+ os.remove(File)
+
+## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C structure style
+#
+# @param Guid The GUID string
+#
+# @retval string The GUID string in C structure style
+#
+def GuidStringToGuidStructureString(Guid):
+ GuidList = Guid.split('-')
+ Result = '{'
+ for Index in range(0, 3, 1):
+ Result = Result + '0x' + GuidList[Index] + ', '
+ Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4]
+ for Index in range(0, 12, 2):
+ Result = Result + ', 0x' + GuidList[4][Index:Index + 2]
+ Result += '}}'
+ return Result
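+# Illustrative (a sketch): '00000000-1111-2222-3344-556677889900' becomes
+# '{0x00000000, 0x1111, 0x2222, {0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0x00}}'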
+
+## Convert GUID structure in byte array to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+#
+# @param GuidValue The GUID value in byte array
+#
+# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
+#
+def GuidStructureByteArrayToGuidString(GuidValue):
+ guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "").replace(";", "")
+ guidValueList = guidValueString.split(",")
+ if len(guidValueList) != 16:
+ return ''
+ #EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue)
+ try:
+ return "%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
+ int(guidValueList[3], 16),
+ int(guidValueList[2], 16),
+ int(guidValueList[1], 16),
+ int(guidValueList[0], 16),
+ int(guidValueList[5], 16),
+ int(guidValueList[4], 16),
+ int(guidValueList[7], 16),
+ int(guidValueList[6], 16),
+ int(guidValueList[8], 16),
+ int(guidValueList[9], 16),
+ int(guidValueList[10], 16),
+ int(guidValueList[11], 16),
+ int(guidValueList[12], 16),
+ int(guidValueList[13], 16),
+ int(guidValueList[14], 16),
+ int(guidValueList[15], 16)
+ )
+ except:
+ return ''
+
+## Convert GUID string in C structure style to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+#
+# @param GuidValue The GUID value in C structure format
+#
+# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
+#
+def GuidStructureStringToGuidString(GuidValue):
+ if not GlobalData.gGuidCFormatPattern.match(GuidValue):
+ return ''
+ guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "").replace(";", "")
+ guidValueList = guidValueString.split(",")
+ if len(guidValueList) != 11:
+ return ''
+ #EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue)
+ try:
+ return "%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
+ int(guidValueList[0], 16),
+ int(guidValueList[1], 16),
+ int(guidValueList[2], 16),
+ int(guidValueList[3], 16),
+ int(guidValueList[4], 16),
+ int(guidValueList[5], 16),
+ int(guidValueList[6], 16),
+ int(guidValueList[7], 16),
+ int(guidValueList[8], 16),
+ int(guidValueList[9], 16),
+ int(guidValueList[10], 16)
+ )
+ except:
+ return ''
+
+## Convert GUID string in C structure style to xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx
+#
+# @param GuidValue The GUID value in C structure format
+#
+# @retval string The GUID value in xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx format
+#
+def GuidStructureStringToGuidValueName(GuidValue):
+ guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "")
+ guidValueList = guidValueString.split(",")
+ if len(guidValueList) != 11:
+ EdkLogger.error(None, FORMAT_INVALID, "Invalid GUID value string [%s]" % GuidValue)
+ return "%08x_%04x_%04x_%02x%02x_%02x%02x%02x%02x%02x%02x" % (
+ int(guidValueList[0], 16),
+ int(guidValueList[1], 16),
+ int(guidValueList[2], 16),
+ int(guidValueList[3], 16),
+ int(guidValueList[4], 16),
+ int(guidValueList[5], 16),
+ int(guidValueList[6], 16),
+ int(guidValueList[7], 16),
+ int(guidValueList[8], 16),
+ int(guidValueList[9], 16),
+ int(guidValueList[10], 16)
+ )
+
+## Create directories
+#
+# @param Directory The directory name
+#
+def CreateDirectory(Directory):
+ if Directory is None or Directory.strip() == "":
+ return True
+ try:
+ if not os.access(Directory, os.F_OK):
+ os.makedirs(Directory)
+ except:
+ return False
+ return True
+
+## Remove directories, including files and sub-directories in it
+#
+# @param Directory The directory name
+#
+def RemoveDirectory(Directory, Recursively=False):
+ if Directory is None or Directory.strip() == "" or not os.path.exists(Directory):
+ return
+ if Recursively:
+ CurrentDirectory = os.getcwd()
+ os.chdir(Directory)
+ for File in os.listdir("."):
+ if os.path.isdir(File):
+ RemoveDirectory(File, Recursively)
+ else:
+ os.remove(File)
+ os.chdir(CurrentDirectory)
+ os.rmdir(Directory)
+
+## Store content in file
+#
+# This method is used to save file only when its content is changed. This is
+# quite useful for "make" system to decide what will be re-built and what won't.
+#
+# @param File The path of file
+# @param Content The new content of the file
+# @param IsBinaryFile The flag indicating if the file is binary file or not
+#
+# @retval True If the file content is changed and the file is renewed
+# @retval False If the file content is the same
+#
+def SaveFileOnChange(File, Content, IsBinaryFile=True, FileLock=None):
+
+ # Convert to long file path format
+ File = LongFilePath(File)
+
+ if os.path.exists(File):
+ if IsBinaryFile:
+ try:
+ with open(File, "rb") as f:
+ if Content == f.read():
+ return False
+ except:
+ EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
+ else:
+ try:
+ with open(File, "r") as f:
+ if Content == f.read():
+ return False
+ except:
+ EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
+
+ DirName = os.path.dirname(File)
+ if not CreateDirectory(DirName):
+ EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
+ else:
+ if DirName == '':
+ DirName = os.getcwd()
+ if not os.access(DirName, os.W_OK):
+ EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
+
+ OpenMode = "w"
+ if IsBinaryFile:
+ OpenMode = "wb"
+
+ # use default file_lock if no input new lock
+ if not FileLock:
+ FileLock = GlobalData.file_lock
+ if FileLock:
+ FileLock.acquire()
+
+
+ if GlobalData.gIsWindows and not os.path.exists(File):
+ try:
+ with open(File, OpenMode) as tf:
+ tf.write(Content)
+ except IOError as X:
+ if GlobalData.gBinCacheSource:
+ EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))
+ else:
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+ finally:
+ if FileLock:
+ FileLock.release()
+ else:
+ try:
+ with open(File, OpenMode) as Fd:
+ Fd.write(Content)
+ except IOError as X:
+ if GlobalData.gBinCacheSource:
+ EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))
+ else:
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+ finally:
+ if FileLock:
+ FileLock.release()
+
+ return True
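+# Illustrative (a sketch; the path and content names are assumed examples):
+#   SaveFileOnChange('Build/AutoGen.c', NewContent, IsBinaryFile=False)
+# writes only when NewContent differs from what is on disk, so identical
+# regenerations do not disturb make-style timestamp checks.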
+
+## Copy source file only if it is different from the destination file
+#
+# This method is used to copy file only if the source file and destination
+# file content are different. This is quite useful to avoid duplicated
+# file writing.
+#
+# @param SrcFile The path of source file
+# @param Dst The path of destination file or folder
+#
+# @retval True The two files content are different and the file is copied
+# @retval False No copy really happen
+#
+def CopyFileOnChange(SrcFile, Dst, FileLock=None):
+
+ # Convert to long file path format
+ SrcFile = LongFilePath(SrcFile)
+ Dst = LongFilePath(Dst)
+
+ if os.path.isdir(SrcFile):
+ EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='CopyFileOnChange SrcFile is a dir, not a file: %s' % SrcFile)
+ return False
+
+ if os.path.isdir(Dst):
+ DstFile = os.path.join(Dst, os.path.basename(SrcFile))
+ else:
+ DstFile = Dst
+
+ if os.path.exists(DstFile) and filecmp.cmp(SrcFile, DstFile, shallow=False):
+ return False
+
+ DirName = os.path.dirname(DstFile)
+ if not CreateDirectory(DirName):
+ EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
+ else:
+ if DirName == '':
+ DirName = os.getcwd()
+ if not os.access(DirName, os.W_OK):
+ EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
+
+ # use default file_lock if no input new lock
+ if not FileLock:
+ FileLock = GlobalData.file_lock
+ if FileLock:
+ FileLock.acquire()
+
+ try:
+ CopyLong(SrcFile, DstFile)
+ except IOError as X:
+ if GlobalData.gBinCacheSource:
+ EdkLogger.quiet("[cache error]:fails to copy file with error: %s" % (X))
+ else:
+ EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='IOError %s' % X)
+ finally:
+ if FileLock:
+ FileLock.release()
+
+ return True
+
+## Retrieve and cache the real path name in file system
+#
+# @param Root The root directory of path relative to
+#
+# @retval str The path string if the path exists
+# @retval None If path doesn't exist
+#
+class DirCache:
+ _CACHE_ = set()
+ _UPPER_CACHE_ = {}
+
+ def __init__(self, Root):
+ self._Root = Root
+ for F in os.listdir(Root):
+ self._CACHE_.add(F)
+ self._UPPER_CACHE_[F.upper()] = F
+
+ # =[] operator
+ def __getitem__(self, Path):
+ Path = Path[len(os.path.commonprefix([Path, self._Root])):]
+ if not Path:
+ return self._Root
+ if Path and Path[0] == os.path.sep:
+ Path = Path[1:]
+ if Path in self._CACHE_:
+ return os.path.join(self._Root, Path)
+ UpperPath = Path.upper()
+ if UpperPath in self._UPPER_CACHE_:
+ return os.path.join(self._Root, self._UPPER_CACHE_[UpperPath])
+
+ IndexList = []
+ LastSepIndex = -1
+ SepIndex = Path.find(os.path.sep)
+ while SepIndex > -1:
+ Parent = UpperPath[:SepIndex]
+ if Parent not in self._UPPER_CACHE_:
+ break
+ LastSepIndex = SepIndex
+ SepIndex = Path.find(os.path.sep, LastSepIndex + 1)
+
+ if LastSepIndex == -1:
+ return None
+
+ Cwd = os.getcwd()
+ os.chdir(self._Root)
+ SepIndex = LastSepIndex
+ while SepIndex > -1:
+ Parent = Path[:SepIndex]
+ ParentKey = UpperPath[:SepIndex]
+ if ParentKey not in self._UPPER_CACHE_:
+ os.chdir(Cwd)
+ return None
+
+ if Parent in self._CACHE_:
+ ParentDir = Parent
+ else:
+ ParentDir = self._UPPER_CACHE_[ParentKey]
+ for F in os.listdir(ParentDir):
+ Dir = os.path.join(ParentDir, F)
+ self._CACHE_.add(Dir)
+ self._UPPER_CACHE_[Dir.upper()] = Dir
+
+ SepIndex = Path.find(os.path.sep, SepIndex + 1)
+
+ os.chdir(Cwd)
+ if Path in self._CACHE_:
+ return os.path.join(self._Root, Path)
+ elif UpperPath in self._UPPER_CACHE_:
+ return os.path.join(self._Root, self._UPPER_CACHE_[UpperPath])
+ return None
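+# Illustrative DirCache behavior (a sketch): given Cache = DirCache(Root),
+# Cache[SomePath] returns the real, case-correct path under Root if SomePath
+# exists under any casing, or None when it cannot be resolved.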
+
+def RealPath(File, Dir='', OverrideDir=''):
+ NewFile = os.path.normpath(os.path.join(Dir, File))
+ NewFile = GlobalData.gAllFiles[NewFile]
+ if not NewFile and OverrideDir:
+ NewFile = os.path.normpath(os.path.join(OverrideDir, File))
+ NewFile = GlobalData.gAllFiles[NewFile]
+ return NewFile
+
+## Get GUID value from given packages
+#
+# @param CName The CName of the GUID
+# @param PackageList List of packages looking-up in
+# @param Inffile The driver file
+#
+# @retval GuidValue if the CName is found in any given package
+# @retval None if the CName is not found in all given packages
+#
+def GuidValue(CName, PackageList, Inffile = None):
+ for P in PackageList:
+ GuidKeys = list(P.Guids.keys())
+ if Inffile and P._PrivateGuids:
+ if not Inffile.startswith(P.MetaFile.Dir):
+ GuidKeys = [x for x in P.Guids if x not in P._PrivateGuids]
+ if CName in GuidKeys:
+ return P.Guids[CName]
+ return None
+
+## A string template class
+#
+# This class implements a template for string replacement. A string template
+# looks like following
+#
+# ${BEGIN} other_string ${placeholder_name} other_string ${END}
+#
+# The string between ${BEGIN} and ${END} will be repeated as many times as the
+# length of "placeholder_name", which is a list passed through a dict. The
+# "placeholder_name" is the key name of the dict. The ${BEGIN} and ${END} can
+# be not used and, in this case, the "placeholder_name" must not a list and it
+# will just be replaced once.
+#
+class TemplateString(object):
+ _REPEAT_START_FLAG = "BEGIN"
+ _REPEAT_END_FLAG = "END"
+
+ class Section(object):
+ _LIST_TYPES = [type([]), type(set()), type((0,))]
+
+ def __init__(self, TemplateSection, PlaceHolderList):
+ self._Template = TemplateSection
+ self._PlaceHolderList = []
+
+ # Split the section into sub-sections according to the position of placeholders
+ if PlaceHolderList:
+ self._SubSectionList = []
+ SubSectionStart = 0
+ #
+ # The placeholders passed in must be in the format of
+ #
+ # PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint
+ #
+ for PlaceHolder, Start, End in PlaceHolderList:
+ self._SubSectionList.append(TemplateSection[SubSectionStart:Start])
+ self._SubSectionList.append(TemplateSection[Start:End])
+ self._PlaceHolderList.append(PlaceHolder)
+ SubSectionStart = End
+ if SubSectionStart < len(TemplateSection):
+ self._SubSectionList.append(TemplateSection[SubSectionStart:])
+ else:
+ self._SubSectionList = [TemplateSection]
+
+ def __str__(self):
+ return self._Template + " : " + str(self._PlaceHolderList)
+
+ def Instantiate(self, PlaceHolderValues):
+ RepeatTime = -1
+ RepeatPlaceHolders = {}
+ NonRepeatPlaceHolders = {}
+
+ for PlaceHolder in self._PlaceHolderList:
+ if PlaceHolder not in PlaceHolderValues:
+ continue
+ Value = PlaceHolderValues[PlaceHolder]
+ if type(Value) in self._LIST_TYPES:
+ if RepeatTime < 0:
+ RepeatTime = len(Value)
+ elif RepeatTime != len(Value):
+ EdkLogger.error(
+ "TemplateString",
+ PARAMETER_INVALID,
+ "${%s} has different repeat time from others!" % PlaceHolder,
+ ExtraData=str(self._Template)
+ )
+ RepeatPlaceHolders["${%s}" % PlaceHolder] = Value
+ else:
+ NonRepeatPlaceHolders["${%s}" % PlaceHolder] = Value
+
+ if NonRepeatPlaceHolders:
+ StringList = []
+ for S in self._SubSectionList:
+ if S not in NonRepeatPlaceHolders:
+ StringList.append(S)
+ else:
+ StringList.append(str(NonRepeatPlaceHolders[S]))
+ else:
+ StringList = self._SubSectionList
+
+ if RepeatPlaceHolders:
+ TempStringList = []
+ for Index in range(RepeatTime):
+ for S in StringList:
+ if S not in RepeatPlaceHolders:
+ TempStringList.append(S)
+ else:
+ TempStringList.append(str(RepeatPlaceHolders[S][Index]))
+ StringList = TempStringList
+
+ return "".join(StringList)
+
+ ## Constructor
+ def __init__(self, Template=None):
+ self.String = []
+ self.IsBinary = False
+ self._Template = Template
+ self._TemplateSectionList = self._Parse(Template)
+
+ ## str() operator
+ #
+ # @retval string The string replaced
+ #
+ def __str__(self):
+ return "".join(self.String)
+
+ ## Split the template string into fragments per the ${BEGIN} and ${END} flags
+ #
+ # @retval list A list of TemplateString.Section objects
+ #
+ def _Parse(self, Template):
+ SectionStart = 0
+ SearchFrom = 0
+ MatchEnd = 0
+ PlaceHolderList = []
+ TemplateSectionList = []
+ while Template:
+ MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
+ if not MatchObj:
+ if MatchEnd <= len(Template):
+ TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
+ TemplateSectionList.append(TemplateSection)
+ break
+
+ MatchString = MatchObj.group(1)
+ MatchStart = MatchObj.start()
+ MatchEnd = MatchObj.end()
+
+ if MatchString == self._REPEAT_START_FLAG:
+ if MatchStart > SectionStart:
+ TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList)
+ TemplateSectionList.append(TemplateSection)
+ SectionStart = MatchEnd
+ PlaceHolderList = []
+ elif MatchString == self._REPEAT_END_FLAG:
+ TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList)
+ TemplateSectionList.append(TemplateSection)
+ SectionStart = MatchEnd
+ PlaceHolderList = []
+ else:
+ PlaceHolderList.append((MatchString, MatchStart - SectionStart, MatchEnd - SectionStart))
+ SearchFrom = MatchEnd
+ return TemplateSectionList
+
+ ## Replace the string template with dictionary of placeholders and append it to previous one
+ #
+ # @param AppendString The string template to append
+ # @param Dictionary The placeholder dictionaries
+ #
+ def Append(self, AppendString, Dictionary=None):
+ if Dictionary:
+ SectionList = self._Parse(AppendString)
+            self.String.append("".join(S.Instantiate(Dictionary) for S in SectionList))
+ else:
+ if isinstance(AppendString,list):
+ self.String.extend(AppendString)
+ else:
+ self.String.append(AppendString)
+
+ ## Replace the string template with dictionary of placeholders
+ #
+ # @param Dictionary The placeholder dictionaries
+ #
+ # @retval str The string replaced with placeholder values
+ #
+ def Replace(self, Dictionary=None):
+ return "".join(S.Instantiate(Dictionary) for S in self._TemplateSectionList)
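+# Illustrative TemplateString usage (a sketch; the placeholder name is an
+# assumed example):
+#   T = TemplateString()
+#   T.Append("${BEGIN}Line ${Num}\n${END}", {"Num": ["1", "2"]})
+#   str(T)   ->  "Line 1\nLine 2\n"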
+
+## Progress indicator class
+#
+# This class makes use of thread to print progress on console.
+#
+class Progressor:
+    # for avoiding a dead loop
+ _StopFlag = None
+ _ProgressThread = None
+ _CheckInterval = 0.25
+
+ ## Constructor
+ #
+ # @param OpenMessage The string printed before progress characters
+ # @param CloseMessage The string printed after progress characters
+ # @param ProgressChar The character used to indicate the progress
+ # @param Interval The interval in seconds between two progress characters
+ #
+ def __init__(self, OpenMessage="", CloseMessage="", ProgressChar='.', Interval=1.0):
+ self.PromptMessage = OpenMessage
+ self.CodaMessage = CloseMessage
+ self.ProgressChar = ProgressChar
+ self.Interval = Interval
+ if Progressor._StopFlag is None:
+ Progressor._StopFlag = threading.Event()
+
+ ## Start to print progress character
+ #
+ # @param OpenMessage The string printed before progress characters
+ #
+ def Start(self, OpenMessage=None):
+ if OpenMessage is not None:
+ self.PromptMessage = OpenMessage
+ Progressor._StopFlag.clear()
+ if Progressor._ProgressThread is None:
+ Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)
+            Progressor._ProgressThread.daemon = False
+ Progressor._ProgressThread.start()
+
+ ## Stop printing progress character
+ #
+ # @param CloseMessage The string printed after progress characters
+ #
+ def Stop(self, CloseMessage=None):
+ OriginalCodaMessage = self.CodaMessage
+ if CloseMessage is not None:
+ self.CodaMessage = CloseMessage
+ self.Abort()
+ self.CodaMessage = OriginalCodaMessage
+
+ ## Thread entry method
+ def _ProgressThreadEntry(self):
+ sys.stdout.write(self.PromptMessage + " ")
+ sys.stdout.flush()
+ TimeUp = 0.0
+        while not Progressor._StopFlag.is_set():
+ if TimeUp <= 0.0:
+ sys.stdout.write(self.ProgressChar)
+ sys.stdout.flush()
+ TimeUp = self.Interval
+ time.sleep(self._CheckInterval)
+ TimeUp -= self._CheckInterval
+ sys.stdout.write(" " + self.CodaMessage + "\n")
+ sys.stdout.flush()
+
+ ## Abort the progress display
+ @staticmethod
+ def Abort():
+ if Progressor._StopFlag is not None:
+ Progressor._StopFlag.set()
+ if Progressor._ProgressThread is not None:
+ Progressor._ProgressThread.join()
+ Progressor._ProgressThread = None
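+# Illustrative Progressor usage (a sketch; the messages are assumed examples):
+#   P = Progressor("Processing", "done!")
+#   P.Start()    # prints "Processing " and then one '.' per interval
+#   ...          # long-running work on the main thread
+#   P.Stop()     # prints " done!" and joins the progress thread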
+
+
+## Dictionary using prioritized list as key
+#
+class tdict:
+ _ListType = type([])
+ _TupleType = type(())
+ _Wildcard = 'COMMON'
+ _ValidWildcardList = ['COMMON', 'DEFAULT', 'ALL', TAB_STAR, 'PLATFORM']
+
+ def __init__(self, _Single_=False, _Level_=2):
+ self._Level_ = _Level_
+ self.data = {}
+ self._Single_ = _Single_
+
+ # =[] operator
+ def __getitem__(self, key):
+ KeyType = type(key)
+ RestKeys = None
+ if KeyType == self._ListType or KeyType == self._TupleType:
+ FirstKey = key[0]
+ if len(key) > 1:
+ RestKeys = key[1:]
+ elif self._Level_ > 1:
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
+ else:
+ FirstKey = key
+ if self._Level_ > 1:
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
+
+ if FirstKey is None or str(FirstKey).upper() in self._ValidWildcardList:
+ FirstKey = self._Wildcard
+
+ if self._Single_:
+ return self._GetSingleValue(FirstKey, RestKeys)
+ else:
+ return self._GetAllValues(FirstKey, RestKeys)
+
+ def _GetSingleValue(self, FirstKey, RestKeys):
+ Value = None
+ #print "%s-%s" % (FirstKey, self._Level_) ,
+ if self._Level_ > 1:
+ if FirstKey == self._Wildcard:
+ if FirstKey in self.data:
+ Value = self.data[FirstKey][RestKeys]
+ if Value is None:
+ for Key in self.data:
+ Value = self.data[Key][RestKeys]
+ if Value is not None: break
+ else:
+ if FirstKey in self.data:
+ Value = self.data[FirstKey][RestKeys]
+ if Value is None and self._Wildcard in self.data:
+ #print "Value=None"
+ Value = self.data[self._Wildcard][RestKeys]
+ else:
+ if FirstKey == self._Wildcard:
+ if FirstKey in self.data:
+ Value = self.data[FirstKey]
+ if Value is None:
+ for Key in self.data:
+ Value = self.data[Key]
+ if Value is not None: break
+ else:
+ if FirstKey in self.data:
+ Value = self.data[FirstKey]
+ elif self._Wildcard in self.data:
+ Value = self.data[self._Wildcard]
+ return Value
+
+ def _GetAllValues(self, FirstKey, RestKeys):
+ Value = []
+ if self._Level_ > 1:
+ if FirstKey == self._Wildcard:
+ for Key in self.data:
+ Value += self.data[Key][RestKeys]
+ else:
+ if FirstKey in self.data:
+ Value += self.data[FirstKey][RestKeys]
+ if self._Wildcard in self.data:
+ Value += self.data[self._Wildcard][RestKeys]
+ else:
+ if FirstKey == self._Wildcard:
+ for Key in self.data:
+ Value.append(self.data[Key])
+ else:
+ if FirstKey in self.data:
+ Value.append(self.data[FirstKey])
+ if self._Wildcard in self.data:
+ Value.append(self.data[self._Wildcard])
+ return Value
+
+ ## []= operator
+ def __setitem__(self, key, value):
+ KeyType = type(key)
+ RestKeys = None
+ if KeyType == self._ListType or KeyType == self._TupleType:
+ FirstKey = key[0]
+ if len(key) > 1:
+ RestKeys = key[1:]
+ else:
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
+ else:
+ FirstKey = key
+ if self._Level_ > 1:
+ RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
+
+ if FirstKey in self._ValidWildcardList:
+ FirstKey = self._Wildcard
+
+ if FirstKey not in self.data and self._Level_ > 0:
+ self.data[FirstKey] = tdict(self._Single_, self._Level_ - 1)
+
+ if self._Level_ > 1:
+ self.data[FirstKey][RestKeys] = value
+ else:
+ self.data[FirstKey] = value
+
+ def SetGreedyMode(self):
+ self._Single_ = False
+ if self._Level_ > 1:
+ for Key in self.data:
+ self.data[Key].SetGreedyMode()
+
+ def SetSingleMode(self):
+ self._Single_ = True
+ if self._Level_ > 1:
+ for Key in self.data:
+ self.data[Key].SetSingleMode()
+
+ def GetKeys(self, KeyIndex=0):
+ assert KeyIndex >= 0
+ if KeyIndex == 0:
+ return set(self.data.keys())
+ else:
+ keys = set()
+ for Key in self.data:
+ keys |= self.data[Key].GetKeys(KeyIndex - 1)
+ return keys
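+# Illustrative tdict usage (a sketch): a two-level dictionary with prioritized
+# keys where 'COMMON' (and the other wildcards) match any key on lookup:
+#   D = tdict(True, 2)
+#   D['COMMON', 'Name'] = 1
+#   D['IA32', 'Name']    # -> 1, falling back to the COMMON entry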
+
+def AnalyzePcdExpression(Setting):
+ RanStr = ''.join(sample(string.ascii_letters + string.digits, 8))
+ Setting = Setting.replace('\\\\', RanStr).strip()
+ # There might be escaped quote in a string: \", \\\" , \', \\\'
+ Data = Setting
+ # There might be '|' in string and in ( ... | ... ), replace it with '-'
+ NewStr = ''
+ InSingleQuoteStr = False
+ InDoubleQuoteStr = False
+ Pair = 0
+ for Index, ch in enumerate(Data):
+ if ch == '"' and not InSingleQuoteStr:
+ if Data[Index - 1] != '\\':
+ InDoubleQuoteStr = not InDoubleQuoteStr
+ elif ch == "'" and not InDoubleQuoteStr:
+ if Data[Index - 1] != '\\':
+ InSingleQuoteStr = not InSingleQuoteStr
+ elif ch == '(' and not (InSingleQuoteStr or InDoubleQuoteStr):
+ Pair += 1
+ elif ch == ')' and not (InSingleQuoteStr or InDoubleQuoteStr):
+ Pair -= 1
+
+ if (Pair > 0 or InSingleQuoteStr or InDoubleQuoteStr) and ch == TAB_VALUE_SPLIT:
+ NewStr += '-'
+ else:
+ NewStr += ch
+ FieldList = []
+ StartPos = 0
+ while True:
+ Pos = NewStr.find(TAB_VALUE_SPLIT, StartPos)
+ if Pos < 0:
+ FieldList.append(Setting[StartPos:].strip())
+ break
+ FieldList.append(Setting[StartPos:Pos].strip())
+ StartPos = Pos + 1
+ for i, ch in enumerate(FieldList):
+ if RanStr in ch:
+ FieldList[i] = ch.replace(RanStr,'\\\\')
+ return FieldList
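+# Illustrative (a sketch, assuming TAB_VALUE_SPLIT is '|'): separators inside
+# quotes or parentheses are protected, so
+#   AnalyzePcdExpression('L"a|b"|VOID*|16')  ->  ['L"a|b"', 'VOID*', '16']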
+
+def ParseFieldValue (Value):
+ def ParseDevPathValue (Value):
+        if '\\' in Value:
+            # str.replace returns a new string; assign the result back
+            Value = Value.replace('\\', '/').replace(' ', '')
+
+ Cmd = 'DevicePath ' + '"' + Value + '"'
+ p = None
+ try:
+ p = subprocess.Popen(Cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ out, err = p.communicate()
+ except Exception as X:
+ raise BadExpression("DevicePath: %s" % (str(X)))
+ finally:
+ subprocess._cleanup()
+ # p is still None if Popen itself raised; only close pipes that exist
+ if p is not None:
+ p.stdout.close()
+ p.stderr.close()
+ if err:
+ raise BadExpression("DevicePath: %s" % str(err))
+ out = out.decode()
+ Size = len(out.split())
+ out = ','.join(out.split())
+ return '{' + out + '}', Size
+
+ if "{CODE(" in Value:
+ return Value, len(Value.split(","))
+ if isinstance(Value, int):
+ return Value, (Value.bit_length() + 7) // 8
+ if not isinstance(Value, str):
+ raise BadExpression('Type %s is %s' %(Value, type(Value)))
+ Value = Value.strip()
+ if Value.startswith(TAB_UINT8) and Value.endswith(')'):
+ Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
+ if Size > 1:
+ raise BadExpression('Value (%s) Size larger than %d' %(Value, Size))
+ return Value, 1
+ if Value.startswith(TAB_UINT16) and Value.endswith(')'):
+ Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
+ if Size > 2:
+ raise BadExpression('Value (%s) Size larger than %d' %(Value, Size))
+ return Value, 2
+ if Value.startswith(TAB_UINT32) and Value.endswith(')'):
+ Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
+ if Size > 4:
+ raise BadExpression('Value (%s) Size larger than %d' %(Value, Size))
+ return Value, 4
+ if Value.startswith(TAB_UINT64) and Value.endswith(')'):
+ Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
+ if Size > 8:
+ raise BadExpression('Value (%s) Size larger than %d' % (Value, Size))
+ return Value, 8
+ if Value.startswith(TAB_GUID) and Value.endswith(')'):
+ Value = Value.split('(', 1)[1][:-1].strip()
+ if Value[0] == '{' and Value[-1] == '}':
+ TmpValue = GuidStructureStringToGuidString(Value)
+ if not TmpValue:
+ raise BadExpression("Invalid GUID value string %s" % Value)
+ Value = TmpValue
+ if Value[0] == '"' and Value[-1] == '"':
+ Value = Value[1:-1]
+ try:
+ Value = uuid.UUID(Value).bytes_le
+ ValueL, ValueH = struct.unpack('2Q', Value)
+ Value = (ValueH << 64 ) | ValueL
+
+ except ValueError as Message:
+ raise BadExpression(Message)
+ return Value, 16
+ if Value.startswith('L"') and Value.endswith('"'):
+ # Unicode String
+ # translate escape character
+ Value = Value[1:]
+ try:
+ Value = eval(Value)
+ except:
+ Value = Value[1:-1]
+ List = list(Value)
+ List.reverse()
+ Value = 0
+ for Char in List:
+ Value = (Value << 16) | ord(Char)
+ return Value, (len(List) + 1) * 2
+ if Value.startswith('"') and Value.endswith('"'):
+ # ASCII String
+ # translate escape character
+ try:
+ Value = eval(Value)
+ except:
+ Value = Value[1:-1]
+ List = list(Value)
+ List.reverse()
+ Value = 0
+ for Char in List:
+ Value = (Value << 8) | ord(Char)
+ return Value, len(List) + 1
+ if Value.startswith("L'") and Value.endswith("'"):
+ # Unicode Character Constant
+ # translate escape character
+ Value = Value[1:]
+ try:
+ Value = eval(Value)
+ except:
+ Value = Value[1:-1]
+ List = list(Value)
+ if len(List) == 0:
+ raise BadExpression('Length of %s is %s' % (Value, len(List)))
+ List.reverse()
+ Value = 0
+ for Char in List:
+ Value = (Value << 16) | ord(Char)
+ return Value, len(List) * 2
+ if Value.startswith("'") and Value.endswith("'"):
+ # Character constant
+ # translate escape character
+ try:
+ Value = eval(Value)
+ except:
+ Value = Value[1:-1]
+ List = list(Value)
+ if len(List) == 0:
+ raise BadExpression('Length of %s is %s' % (Value, len(List)))
+ List.reverse()
+ Value = 0
+ for Char in List:
+ Value = (Value << 8) | ord(Char)
+ return Value, len(List)
+ if Value.startswith('{') and Value.endswith('}'):
+ # Byte array
+ Value = Value[1:-1]
+ List = [Item.strip() for Item in Value.split(',')]
+ List.reverse()
+ Value = 0
+ RetSize = 0
+ for Item in List:
+ ItemValue, Size = ParseFieldValue(Item)
+ RetSize += Size
+ for I in range(Size):
+ Value = (Value << 8) | ((ItemValue >> 8 * I) & 0xff)
+ return Value, RetSize
+ if Value.startswith('DEVICE_PATH(') and Value.endswith(')'):
+ Value = Value.replace("DEVICE_PATH(", '').rstrip(')')
+ Value = Value.strip().strip('"')
+ return ParseDevPathValue(Value)
+ if Value.lower().startswith('0x'):
+ try:
+ Value = int(Value, 16)
+ except:
+ raise BadExpression("invalid hex value: %s" % Value)
+ if Value == 0:
+ return 0, 1
+ return Value, (Value.bit_length() + 7) // 8
+ if Value[0].isdigit():
+ Value = int(Value, 10)
+ if Value == 0:
+ return 0, 1
+ return Value, (Value.bit_length() + 7) // 8
+ if Value.lower() == 'true':
+ return 1, 1
+ if Value.lower() == 'false':
+ return 0, 1
+ return Value, 1
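+
+# Usage sketch (illustrative, not part of the upstream file); the returned
+# pair is (numeric value, size in bytes), and string sizes include the
+# terminating NUL:
+#   ParseFieldValue('UINT16(0x1234)')  ->  (0x1234, 2)
+#   ParseFieldValue('"AB"')            ->  (0x4241, 3)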
+
+## AnalyzeDscPcd
+#
+# Analyze a DSC PCD value. Since a DSC file carries no data type information,
+# this function mirrors the functions (AnalyzePcdData) used to retrieve PCD values from the database
+# 1. Feature flag: TokenSpace.PcdCName|PcdValue
+# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]
+# 3. Dynamic default:
+# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]
+# TokenSpace.PcdCName|PcdValue
+# 4. Dynamic VPD:
+# TokenSpace.PcdCName|VpdOffset[|VpdValue]
+# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]]
+# 5. Dynamic HII:
+# TokenSpace.PcdCName|HiiString|VariableGuid|VariableOffset[|HiiValue]
+# The PCD value has to be located inside such a string, and the value itself may be an
+# expression containing the "|" operator, which can also appear inside string values.
+#
+# @param Setting: String containing the information described above, with "TokenSpace.PcdCName|" stripped
+# @param PcdType: PCD type: feature, fixed, dynamic default, VPD, HII
+# @param DataType: The datum type of the PCD: VOID*, UINT8/16/32/64, BOOLEAN
+# @retval:
+#   ValueList: A list containing the fields described above
+#   IsValid:   True if the setting conforms to the EBNF above, otherwise False
+#   Index:     The index of PcdValue within ValueList
+#
+def AnalyzeDscPcd(Setting, PcdType, DataType=''):
+ FieldList = AnalyzePcdExpression(Setting)
+
+ IsValid = True
+ if PcdType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT):
+ Value = FieldList[0]
+ Size = ''
+ if len(FieldList) > 1 and FieldList[1]:
+ DataType = FieldList[1]
+ if FieldList[1] != TAB_VOID and StructPattern.match(FieldList[1]) is None:
+ IsValid = False
+ if len(FieldList) > 2:
+ Size = FieldList[2]
+ if IsValid:
+ if DataType == "":
+ IsValid = (len(FieldList) <= 1)
+ else:
+ IsValid = (len(FieldList) <= 3)
+
+ if Size:
+ try:
+ int(Size, 16) if Size.upper().startswith("0X") else int(Size)
+ except:
+ IsValid = False
+ Size = -1
+ return [str(Value), DataType, str(Size)], IsValid, 0
+ elif PcdType == MODEL_PCD_FEATURE_FLAG:
+ Value = FieldList[0]
+ Size = ''
+ IsValid = (len(FieldList) <= 1)
+ return [Value, DataType, str(Size)], IsValid, 0
+ elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):
+ VpdOffset = FieldList[0]
+ Value = Size = ''
+ if not DataType == TAB_VOID:
+ if len(FieldList) > 1:
+ Value = FieldList[1]
+ else:
+ if len(FieldList) > 1:
+ Size = FieldList[1]
+ if len(FieldList) > 2:
+ Value = FieldList[2]
+ if DataType == "":
+ IsValid = (len(FieldList) <= 1)
+ else:
+ IsValid = (len(FieldList) <= 3)
+ if Size:
+ try:
+ int(Size, 16) if Size.upper().startswith("0X") else int(Size)
+ except:
+ IsValid = False
+ Size = -1
+ return [VpdOffset, str(Size), Value], IsValid, 2
+ elif PcdType in (MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII):
+ IsValid = (3 <= len(FieldList) <= 5)
+ HiiString = FieldList[0]
+ Guid = Offset = Value = Attribute = ''
+ if len(FieldList) > 1:
+ Guid = FieldList[1]
+ if len(FieldList) > 2:
+ Offset = FieldList[2]
+ if len(FieldList) > 3:
+ Value = FieldList[3]
+ if len(FieldList) > 4:
+ Attribute = FieldList[4]
+ return [HiiString, Guid, Offset, Value, Attribute], IsValid, 3
+ return [], False, 0
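+
+# Usage sketch (illustrative, not part of the upstream file):
+#   AnalyzeDscPcd('TRUE', MODEL_PCD_FEATURE_FLAG)
+#       ->  (['TRUE', '', ''], True, 0)
+#   AnalyzeDscPcd('0x1|UINT8|1', MODEL_PCD_FIXED_AT_BUILD)
+#       ->  (['0x1', 'UINT8', '1'], True, 0)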
+
+## AnalyzePcdData
+#
+# Analyze the PCD value, datum type and token number.
+# Used to avoid split issues when the value string contains the "|" character
+#
+# @param[in] Setting: A string containing value/datum type/token number information;
+#
+# @retval ValueList: A list containing the value, datum type and token number.
+#
+def AnalyzePcdData(Setting):
+ ValueList = ['', '', '']
+
+ ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
+ PtrValue = ValueRe.findall(Setting)
+
+ ValueUpdateFlag = False
+
+ if len(PtrValue) >= 1:
+ Setting = re.sub(ValueRe, '', Setting)
+ ValueUpdateFlag = True
+
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+
+ if ValueUpdateFlag:
+ ValueList[0] = PtrValue[0]
+
+ return ValueList
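+
+# Usage sketch (illustrative, not part of the upstream file): a '|' inside a
+# quoted value does not break the split:
+#   AnalyzePcdData('L"A|B"|VOID*|10')  ->  ['L"A|B"', 'VOID*', '10']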
+
+## Check the format of a PCD value against its datum type
+#
+# Used when setting a PCD value
+#
+def CheckPcdDatum(Type, Value):
+ if Type == TAB_VOID:
+ ValueRe = re.compile(r'\s*L?\".*\"\s*$')
+ if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"'))
+ or (Value.startswith('{') and Value.endswith('}')) or ((Value.startswith("L'") or Value.startswith("'")) and Value.endswith("'"))
+ ):
+ return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\
+ ", \"...\" or \'...\' for string, L\"...\" or L\'...\' for unicode string" % (Value, Type)
+ elif ValueRe.match(Value):
+ # Check the chars in UnicodeString or CString is printable
+ if Value.startswith("L"):
+ Value = Value[2:-1]
+ else:
+ Value = Value[1:-1]
+ Printset = set(string.printable)
+ Printset.remove(TAB_PRINTCHAR_VT)
+ Printset.add(TAB_PRINTCHAR_BS)
+ Printset.add(TAB_PRINTCHAR_NUL)
+ if not set(Value).issubset(Printset):
+ PrintList = sorted(Printset)
+ return False, "Invalid PCD string value of type [%s]; must be printable chars %s." % (Type, PrintList)
+ elif Type == 'BOOLEAN':
+ if Value not in ['TRUE', 'True', 'true', '0x1', '0x01', '1', 'FALSE', 'False', 'false', '0x0', '0x00', '0']:
+ return False, "Invalid value [%s] of type [%s]; must be one of TRUE, True, true, 0x1, 0x01, 1"\
+ ", FALSE, False, false, 0x0, 0x00, 0" % (Value, Type)
+ elif Type in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64]:
+ if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
+ Value = Value.lstrip('0')
+ try:
+ if Value and int(Value, 0) < 0:
+ return False, "PCD can't be set to negative value[%s] for datum type [%s]" % (Value, Type)
+ Value = int(Value, 0)
+ if Value > MAX_VAL_TYPE[Type]:
+ return False, "Too large PCD value[%s] for datum type [%s]" % (Value, Type)
+ except:
+ return False, "Invalid value [%s] of type [%s];"\
+ " must be a hexadecimal, decimal or octal in C language format." % (Value, Type)
+ else:
+ return True, "StructurePcd"
+
+ return True, ""
+
+def CommonPath(PathList):
+ P1 = min(PathList).split(os.path.sep)
+ P2 = max(PathList).split(os.path.sep)
+ for Index in range(min(len(P1), len(P2))):
+ if P1[Index] != P2[Index]:
+ return os.path.sep.join(P1[:Index])
+ return os.path.sep.join(P1)
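+
+# Usage sketch (illustrative, not part of the upstream file), on a POSIX host:
+#   CommonPath(['/ws/MdePkg/Include', '/ws/MdeModulePkg'])  ->  '/ws'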
+
+class PathClass(object):
+ def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False,
+ Arch='COMMON', ToolChainFamily='', Target='', TagName='', ToolCode=''):
+ self.Arch = Arch
+ self.File = str(File)
+ if os.path.isabs(self.File):
+ self.Root = ''
+ self.AlterRoot = ''
+ else:
+ self.Root = str(Root)
+ self.AlterRoot = str(AlterRoot)
+
+ # Remove any '.' and '..' in path
+ if self.Root:
+ self.Root = mws.getWs(self.Root, self.File)
+ self.Path = os.path.normpath(os.path.join(self.Root, self.File))
+ self.Root = os.path.normpath(CommonPath([self.Root, self.Path]))
+ # eliminate the side-effect of 'C:'
+ if self.Root[-1] == ':':
+ self.Root += os.path.sep
+ # file path should not start with path separator
+ if self.Root[-1] == os.path.sep:
+ self.File = self.Path[len(self.Root):]
+ else:
+ self.File = self.Path[len(self.Root) + 1:]
+ else:
+ self.Path = os.path.normpath(self.File)
+
+ self.SubDir, self.Name = os.path.split(self.File)
+ self.BaseName, self.Ext = os.path.splitext(self.Name)
+
+ if self.Root:
+ if self.SubDir:
+ self.Dir = os.path.join(self.Root, self.SubDir)
+ else:
+ self.Dir = self.Root
+ else:
+ self.Dir = self.SubDir
+
+ if IsBinary:
+ self.Type = Type
+ else:
+ self.Type = self.Ext.lower()
+
+ self.IsBinary = IsBinary
+ self.Target = Target
+ self.TagName = TagName
+ self.ToolCode = ToolCode
+ self.ToolChainFamily = ToolChainFamily
+ self.OriginalPath = self
+
+ ## Convert the object of this class to a string
+ #
+ # Convert member Path of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return self.Path
+
+ ## Override __eq__ function
+ #
+ # Check whether PathClass are the same
+ #
+ # @retval False The two PathClass are different
+ # @retval True The two PathClass are the same
+ #
+ def __eq__(self, Other):
+ return self.Path == str(Other)
+
+ ## Override __cmp__ function
+ #
+ # Customize the comparison operation of two PathClass
+ #
+ # @retval 0 The two PathClass are equal
+ # @retval -1 The first PathClass is less than the second PathClass
+ # @retval 1 The first PathClass is greater than the second PathClass
+ def __cmp__(self, Other):
+ OtherKey = str(Other)
+
+ SelfKey = self.Path
+ if SelfKey == OtherKey:
+ return 0
+ elif SelfKey > OtherKey:
+ return 1
+ else:
+ return -1
+
+ ## Override __hash__ function
+ #
+ # Use Path as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.Path)
+
+ @cached_property
+ def Key(self):
+ return self.Path.upper()
+
+ @property
+ def TimeStamp(self):
+ return os.stat(self.Path)[8]
+
+ def Validate(self, Type='', CaseSensitive=True):
+ def RealPath2(File, Dir='', OverrideDir=''):
+ NewFile = None
+ if OverrideDir:
+ NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
+ if NewFile:
+ if OverrideDir[-1] == os.path.sep:
+ return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
+ else:
+ return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]
+ if GlobalData.gAllFiles:
+ NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
+ if not NewFile:
+ NewFile = os.path.normpath(os.path.join(Dir, File))
+ if not os.path.exists(NewFile):
+ return None, None
+ if NewFile:
+ if Dir:
+ if Dir[-1] == os.path.sep:
+ return NewFile[len(Dir):], NewFile[0:len(Dir)]
+ else:
+ return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
+ else:
+ return NewFile, ''
+
+ return None, None
+
+ if GlobalData.gCaseInsensitive:
+ CaseSensitive = False
+ if Type and Type.lower() != self.Type:
+ return FILE_TYPE_MISMATCH, '%s (expect %s but got %s)' % (self.File, Type, self.Type)
+
+ RealFile, RealRoot = RealPath2(self.File, self.Root, self.AlterRoot)
+ if not RealRoot and not RealFile:
+ RealFile = self.File
+ if self.AlterRoot:
+ RealFile = os.path.join(self.AlterRoot, self.File)
+ elif self.Root:
+ RealFile = os.path.join(self.Root, self.File)
+ if len (mws.getPkgPath()) == 0:
+ return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile)
+ else:
+ return FILE_NOT_FOUND, "%s is not found in packages path:\n\t%s" % (self.File, '\n\t'.join(mws.getPkgPath()))
+
+ ErrorCode = 0
+ ErrorInfo = ''
+ if RealRoot != self.Root or RealFile != self.File:
+ if CaseSensitive and (RealFile != self.File or (RealRoot != self.Root and RealRoot != self.AlterRoot)):
+ ErrorCode = FILE_CASE_MISMATCH
+ ErrorInfo = self.File + '\n\t' + RealFile + " [in file system]"
+
+ self.SubDir, self.Name = os.path.split(RealFile)
+ self.BaseName, self.Ext = os.path.splitext(self.Name)
+ if self.SubDir:
+ self.Dir = os.path.join(RealRoot, self.SubDir)
+ else:
+ self.Dir = RealRoot
+ self.File = RealFile
+ self.Root = RealRoot
+ self.Path = os.path.join(RealRoot, RealFile)
+ return ErrorCode, ErrorInfo
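+
+# Usage sketch (illustrative, not part of the upstream file; the workspace
+# layout is hypothetical and assumes mws/GlobalData have been initialized):
+#   Module = PathClass('MdePkg/Library/BaseLib/BaseLib.inf', Root='/ws')
+#   str(Module)  ->  '/ws/MdePkg/Library/BaseLib/BaseLib.inf'
+#   Module.Ext   ->  '.inf'
+#   ErrorCode, ErrorInfo = Module.Validate('.inf')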
+
+## Parse PE image to get the required PE information.
+#
+class PeImageClass():
+ ## Constructor
+ #
+ # @param File FilePath of PeImage
+ #
+ def __init__(self, PeFile):
+ self.FileName = PeFile
+ self.IsValid = False
+ self.Size = 0
+ self.EntryPoint = 0
+ self.SectionAlignment = 0
+ self.SectionHeaderList = []
+ self.ErrorInfo = ''
+ try:
+ PeObject = open(PeFile, 'rb')
+ except:
+ self.ErrorInfo = self.FileName + ' can not be found\n'
+ return
+ # Read DOS header
+ ByteArray = array.array('B')
+ ByteArray.fromfile(PeObject, 0x3E)
+ ByteList = ByteArray.tolist()
+ # DOS signature should be 'MZ'
+ if self._ByteListToStr (ByteList[0x0:0x2]) != 'MZ':
+ self.ErrorInfo = self.FileName + ' has no valid DOS signature MZ'
+ return
+
+ # Read 4 byte PE Signature
+ PeOffset = self._ByteListToInt(ByteList[0x3C:0x3E])
+ PeObject.seek(PeOffset)
+ ByteArray = array.array('B')
+ ByteArray.fromfile(PeObject, 4)
+ # PE signature should be 'PE\0\0'
+ if ByteArray.tolist() != [ord('P'), ord('E'), 0, 0]:
+ self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
+ return
+
+ # Read PE file header
+ ByteArray = array.array('B')
+ ByteArray.fromfile(PeObject, 0x14)
+ ByteList = ByteArray.tolist()
+ SecNumber = self._ByteListToInt(ByteList[0x2:0x4])
+ if SecNumber == 0:
+ self.ErrorInfo = self.FileName + ' has no section header'
+ return
+
+ # Read PE optional header
+ OptionalHeaderSize = self._ByteListToInt(ByteList[0x10:0x12])
+ ByteArray = array.array('B')
+ ByteArray.fromfile(PeObject, OptionalHeaderSize)
+ ByteList = ByteArray.tolist()
+ self.EntryPoint = self._ByteListToInt(ByteList[0x10:0x14])
+ self.SectionAlignment = self._ByteListToInt(ByteList[0x20:0x24])
+ self.Size = self._ByteListToInt(ByteList[0x38:0x3C])
+
+ # Read each Section Header
+ for Index in range(SecNumber):
+ ByteArray = array.array('B')
+ ByteArray.fromfile(PeObject, 0x28)
+ ByteList = ByteArray.tolist()
+ SecName = self._ByteListToStr(ByteList[0:8])
+ SecVirtualSize = self._ByteListToInt(ByteList[8:12])
+ SecRawAddress = self._ByteListToInt(ByteList[20:24])
+ SecVirtualAddress = self._ByteListToInt(ByteList[12:16])
+ self.SectionHeaderList.append((SecName, SecVirtualAddress, SecRawAddress, SecVirtualSize))
+ self.IsValid = True
+ PeObject.close()
+
+ def _ByteListToStr(self, ByteList):
+ String = ''
+ for index in range(len(ByteList)):
+ if ByteList[index] == 0:
+ break
+ String += chr(ByteList[index])
+ return String
+
+ def _ByteListToInt(self, ByteList):
+ Value = 0
+ for index in range(len(ByteList) - 1, -1, -1):
+ Value = (Value << 8) | int(ByteList[index])
+ return Value
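+
+# Usage sketch (illustrative, not part of the upstream file; the image path
+# is hypothetical):
+#   Image = PeImageClass('Build/Shell.efi')
+#   if Image.IsValid:
+#       print(hex(Image.EntryPoint), hex(Image.SectionAlignment))
+#   else:
+#       print(Image.ErrorInfo)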
+
+class DefaultStore():
+ def __init__(self, DefaultStores):
+ self.DefaultStores = DefaultStores
+ def DefaultStoreID(self, DefaultStoreName):
+ for key, value in self.DefaultStores.items():
+ if value == DefaultStoreName:
+ return key
+ return None
+ def GetDefaultDefault(self):
+ if not self.DefaultStores or "0" in self.DefaultStores:
+ return "0", TAB_DEFAULT_STORES_DEFAULT
+ else:
+ minvalue = min(int(value_str) for value_str in self.DefaultStores)
+ return (str(minvalue), self.DefaultStores[str(minvalue)])
+ def GetMin(self, DefaultSIdList):
+ if not DefaultSIdList:
+ return TAB_DEFAULT_STORES_DEFAULT
+ storeidset = {storeid for storeid, storename in self.DefaultStores.values() if storename in DefaultSIdList}
+ if not storeidset:
+ return ""
+ minid = min(storeidset)
+ for sid, name in self.DefaultStores.values():
+ if sid == minid:
+ return name
+
+class SkuClass():
+ DEFAULT = 0
+ SINGLE = 1
+ MULTIPLE = 2
+
+ def __init__(self, SkuIdentifier='', SkuIds=None):
+ if SkuIds is None:
+ SkuIds = {}
+
+ for SkuName in SkuIds:
+ SkuId = SkuIds[SkuName][0]
+ skuid_num = int(SkuId, 16) if SkuId.upper().startswith("0X") else int(SkuId)
+ if skuid_num > 0xFFFFFFFFFFFFFFFF:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
+ % (SkuName, SkuId))
+
+ self.AvailableSkuIds = OrderedDict()
+ self.SkuIdSet = []
+ self.SkuIdNumberSet = []
+ self.SkuData = SkuIds
+ self._SkuInherit = {}
+ self._SkuIdentifier = SkuIdentifier
+ if SkuIdentifier == '' or SkuIdentifier is None:
+ self.SkuIdSet = ['DEFAULT']
+ self.SkuIdNumberSet = ['0U']
+ elif SkuIdentifier == 'ALL':
+ self.SkuIdSet = list(SkuIds.keys())
+ self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
+ else:
+ r = SkuIdentifier.split('|')
+ self.SkuIdSet = [Item.strip().upper() for Item in r]
+ k = None
+ try:
+ # Loop explicitly so that 'k' still names the offending SKU-ID in the
+ # error report below (a comprehension would not leak 'k')
+ for k in self.SkuIdSet:
+ self.SkuIdNumberSet.append(SkuIds[k][0].strip() + 'U')
+ except Exception:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData = "SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"
+ % (k, " | ".join(SkuIds.keys())))
+ for each in self.SkuIdSet:
+ if each in SkuIds:
+ self.AvailableSkuIds[each] = SkuIds[each][0]
+ else:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData="SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"
+ % (each, " | ".join(SkuIds.keys())))
+ if self.SkuUsageType != SkuClass.SINGLE:
+ self.AvailableSkuIds.update({'DEFAULT':0, 'COMMON':0})
+ if self.SkuIdSet:
+ GlobalData.gSkuids = (self.SkuIdSet)
+ if 'COMMON' in GlobalData.gSkuids:
+ GlobalData.gSkuids.remove('COMMON')
+ if self.SkuUsageType == self.SINGLE:
+ if len(GlobalData.gSkuids) != 1:
+ if 'DEFAULT' in GlobalData.gSkuids:
+ GlobalData.gSkuids.remove('DEFAULT')
+ if GlobalData.gSkuids:
+ GlobalData.gSkuids.sort()
+
+ def GetNextSkuId(self, skuname):
+ if not self._SkuInherit:
+ self._SkuInherit = {}
+ for item in self.SkuData.values():
+ self._SkuInherit[item[1]]=item[2] if item[2] else "DEFAULT"
+ return self._SkuInherit.get(skuname, "DEFAULT")
+
+ def GetSkuChain(self, sku):
+ if sku == "DEFAULT":
+ return ["DEFAULT"]
+ skulist = [sku]
+ nextsku = sku
+ while True:
+ nextsku = self.GetNextSkuId(nextsku)
+ skulist.append(nextsku)
+ if nextsku == "DEFAULT":
+ break
+ skulist.reverse()
+ return skulist
+ def SkuOverrideOrder(self):
+ skuorderset = []
+ for skuname in self.SkuIdSet:
+ skuorderset.append(self.GetSkuChain(skuname))
+
+ skuorder = []
+ for index in range(max(len(item) for item in skuorderset)):
+ for subset in skuorderset:
+ if index > len(subset)-1:
+ continue
+ if subset[index] in skuorder:
+ continue
+ skuorder.append(subset[index])
+
+ return skuorder
+
+ @property
+ def SkuUsageType(self):
+ if self._SkuIdentifier.upper() == "ALL":
+ return SkuClass.MULTIPLE
+
+ if len(self.SkuIdSet) == 1:
+ if self.SkuIdSet[0] == 'DEFAULT':
+ return SkuClass.DEFAULT
+ return SkuClass.SINGLE
+ if len(self.SkuIdSet) == 2 and 'DEFAULT' in self.SkuIdSet:
+ return SkuClass.SINGLE
+ return SkuClass.MULTIPLE
+
+ def DumpSkuIdArrary(self):
+ if self.SkuUsageType == SkuClass.SINGLE:
+ return "{0x0}"
+ ArrayStrList = []
+ for skuname in self.AvailableSkuIds:
+ if skuname == "COMMON":
+ continue
+ while skuname != "DEFAULT":
+ ArrayStrList.append(hex(int(self.AvailableSkuIds[skuname])))
+ skuname = self.GetNextSkuId(skuname)
+ ArrayStrList.append("0x0")
+ return "{{{myList}}}".format(myList=",".join(ArrayStrList))
+
+ @property
+ def AvailableSkuIdSet(self):
+ return self.AvailableSkuIds
+
+ @property
+ def SystemSkuId(self):
+ if self.SkuUsageType == SkuClass.SINGLE:
+ if len(self.SkuIdSet) == 1:
+ return self.SkuIdSet[0]
+ else:
+ return self.SkuIdSet[0] if self.SkuIdSet[0] != 'DEFAULT' else self.SkuIdSet[1]
+ else:
+ return 'DEFAULT'
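+
+# Usage sketch (illustrative, not part of the upstream file), assuming SkuIds
+# maps a SKU name to (Id, Name, Parent) as read from the DSC [SkuIds] section:
+#   Ids = {'DEFAULT': ('0', 'DEFAULT', ''), 'SKU1': ('1', 'SKU1', 'DEFAULT')}
+#   Sku = SkuClass('SKU1', Ids)
+#   Sku.SkuUsageType         ->  SkuClass.SINGLE
+#   Sku.GetSkuChain('SKU1')  ->  ['DEFAULT', 'SKU1']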
+
+## Get the integer value from a string like "14U" or an integer like 2
+#
+# @param Input The object that may be either an integer value or a string
+#
+# @retval Value The integer value that the input represents
+#
+def GetIntegerValue(Input):
+ if not isinstance(Input, str):
+ return Input
+ String = Input
+ if String.endswith("U"):
+ String = String[:-1]
+ if String.endswith("ULL"):
+ String = String[:-3]
+ if String.endswith("LL"):
+ String = String[:-2]
+
+ if String.startswith("0x") or String.startswith("0X"):
+ return int(String, 16)
+ elif String == '':
+ return 0
+ else:
+ return int(String)
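+
+# Usage sketch (illustrative, not part of the upstream file):
+#   GetIntegerValue('14U')    ->  14
+#   GetIntegerValue('0x100')  ->  256
+#   GetIntegerValue(2)        ->  2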
+
+#
+# Pack a GUID (registry format) list into a buffer and return it
+#
+def PackGUID(Guid):
+ return pack(PACK_PATTERN_GUID,
+ int(Guid[0], 16),
+ int(Guid[1], 16),
+ int(Guid[2], 16),
+ int(Guid[3][-4:-2], 16),
+ int(Guid[3][-2:], 16),
+ int(Guid[4][-12:-10], 16),
+ int(Guid[4][-10:-8], 16),
+ int(Guid[4][-8:-6], 16),
+ int(Guid[4][-6:-4], 16),
+ int(Guid[4][-4:-2], 16),
+ int(Guid[4][-2:], 16)
+ )
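+
+# Usage sketch (illustrative, not part of the upstream file): the argument is
+# a registry-format GUID string already split on '-':
+#   PackGUID('378D7B65-8DA9-4773-B6E4-A47826A833E1'.split('-'))
+# returns the packed 16-byte little-endian EFI_GUID buffer.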
+
+#
+# Pack a GUID (byte) list into a buffer and return it
+#
+def PackByteFormatGUID(Guid):
+ return pack(PACK_PATTERN_GUID,
+ Guid[0],
+ Guid[1],
+ Guid[2],
+ Guid[3],
+ Guid[4],
+ Guid[5],
+ Guid[6],
+ Guid[7],
+ Guid[8],
+ Guid[9],
+ Guid[10],
+ )
+
+## Deep-copy a dict/OrderedDict recursively
+#
+# @param ori_dict a nested dict or OrderedDict
+#
+# @retval a new dict or OrderedDict
+#
+def CopyDict(ori_dict):
+ dict_type = ori_dict.__class__
+ if dict_type not in (dict,OrderedDict):
+ return ori_dict
+ new_dict = dict_type()
+ for key in ori_dict:
+ if isinstance(ori_dict[key],(dict,OrderedDict)):
+ new_dict[key] = CopyDict(ori_dict[key])
+ else:
+ new_dict[key] = ori_dict[key]
+ return new_dict
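+
+# Usage sketch (illustrative, not part of the upstream file):
+#   Original = OrderedDict({'a': {'b': 1}})
+#   Clone = CopyDict(Original)
+#   Clone['a']['b'] = 2   # Original['a']['b'] is still 1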
+
+#
+# Remove C/C++ comments: // and /* */
+#
+def RemoveCComments(ctext):
+ return re.sub(r'//.*?\n|/\*.*?\*/', '\n', ctext, flags=re.S)
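+
+# Usage sketch (illustrative, not part of the upstream file):
+#   RemoveCComments('int a; // note\nint b; /* block */ int c;')
+#     ->  'int a; \nint b; \n int c;'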
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/MultipleWorkspace.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/MultipleWorkspace.py
new file mode 100755
index 00000000..df461f28
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/MultipleWorkspace.py
@@ -0,0 +1,150 @@
+## @file
+# Manage multiple workspace files.
+#
+# This file is required to make the Python interpreter treat the directory
+# as containing a package.
+#
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import Common.LongFilePathOs as os
+from Common.DataType import TAB_WORKSPACE
+
+## MultipleWorkspace
+#
+# This class manages multiple workspace behavior
+#
+# @param class:
+#
+# @var WORKSPACE: defines the current WORKSPACE
+# @var PACKAGES_PATH: defines the other workspaces; if a file is not found under the current WORKSPACE, a valid workspace is searched for in PACKAGES_PATH
+#
+class MultipleWorkspace(object):
+ WORKSPACE = ''
+ PACKAGES_PATH = None
+
+ ## convertPackagePath()
+ #
+ # Convert path to match workspace.
+ #
+ # @param cls The class pointer
+ # @param Ws The current WORKSPACE
+ # @param Path Path to be converted to match workspace.
+ #
+ @classmethod
+ def convertPackagePath(cls, Ws, Path):
+ if os.path.normcase(Path).startswith(Ws):
+ return os.path.join(Ws, os.path.relpath(Path, Ws))
+ return Path
+
+ ## setWs()
+ #
+ # set WORKSPACE and PACKAGES_PATH environment
+ #
+ # @param cls The class pointer
+ # @param Ws initialize WORKSPACE variable
+ # @param PackagesPath initialize PackagesPath variable
+ #
+ @classmethod
+ def setWs(cls, Ws, PackagesPath=None):
+ cls.WORKSPACE = Ws
+ if PackagesPath:
+ cls.PACKAGES_PATH = [cls.convertPackagePath (Ws, os.path.normpath(Path.strip())) for Path in PackagesPath.split(os.pathsep)]
+ else:
+ cls.PACKAGES_PATH = []
+
+ ## join()
+ #
+ # rewrite os.path.join function
+ #
+ # @param cls The class pointer
+ # @param Ws the current WORKSPACE
+ # @param *p path of the inf/dec/dsc/fdf/conf file
+ # @retval Path the absolute path of specified file
+ #
+ @classmethod
+ def join(cls, Ws, *p):
+ Path = os.path.join(Ws, *p)
+ if not os.path.exists(Path):
+ for Pkg in cls.PACKAGES_PATH:
+ Path = os.path.join(Pkg, *p)
+ if os.path.exists(Path):
+ return Path
+ Path = os.path.join(Ws, *p)
+ return Path
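+
+ # Usage sketch (illustrative, not part of the upstream file; the paths are
+ # hypothetical): with WORKSPACE '/ws' and PACKAGES_PATH '/extra', join()
+ # falls back to the packages path when the file is absent in '/ws':
+ #   MultipleWorkspace.setWs('/ws', '/extra')
+ #   MultipleWorkspace.join('/ws', 'SomePkg', 'SomePkg.dec')
+ # returns '/extra/SomePkg/SomePkg.dec' when only that copy exists.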
+
+ ## relpath()
+ #
+ # rewrite os.path.relpath function
+ #
+ # @param cls The class pointer
+ # @param Path path of the inf/dec/dsc/fdf/conf file
+ # @param Ws the current WORKSPACE
+ # @retval Path the relative path of specified file
+ #
+ @classmethod
+ def relpath(cls, Path, Ws):
+ for Pkg in cls.PACKAGES_PATH:
+ if Path.lower().startswith(Pkg.lower()):
+ Path = os.path.relpath(Path, Pkg)
+ return Path
+ if Path.lower().startswith(Ws.lower()):
+ Path = os.path.relpath(Path, Ws)
+ return Path
+
+ ## getWs()
+ #
+ # get valid workspace for the path
+ #
+ # @param cls The class pointer
+ # @param Ws the current WORKSPACE
+ # @param Path path of the inf/dec/dsc/fdf/conf file
+ # @retval Ws the valid workspace relative to the specified file path
+ #
+ @classmethod
+ def getWs(cls, Ws, Path):
+ absPath = os.path.join(Ws, Path)
+ if not os.path.exists(absPath):
+ for Pkg in cls.PACKAGES_PATH:
+ absPath = os.path.join(Pkg, Path)
+ if os.path.exists(absPath):
+ return Pkg
+ return Ws
+
+ ## handleWsMacro()
+ #
+ # Handle the $(WORKSPACE) tag: if the path is not valid under the current WORKSPACE, replace the tag with a packages path under which the file exists.
+ #
+ # @param cls The class pointer
+ # @param PathStr Path string that may include the $(WORKSPACE) tag
+ # @retval PathStr Path string with the $(WORKSPACE) tag expanded
+ #
+ @classmethod
+ def handleWsMacro(cls, PathStr):
+ if TAB_WORKSPACE in PathStr:
+ PathList = PathStr.split()
+ if PathList:
+ for i, Str in enumerate(PathList):
+ MacroStartPos = Str.find(TAB_WORKSPACE)
+ if MacroStartPos != -1:
+ Substr = Str[MacroStartPos:]
+ Path = Substr.replace(TAB_WORKSPACE, cls.WORKSPACE).strip()
+ if not os.path.exists(Path):
+ for Pkg in cls.PACKAGES_PATH:
+ Path = Substr.replace(TAB_WORKSPACE, Pkg).strip()
+ if os.path.exists(Path):
+ break
+ PathList[i] = Str[0:MacroStartPos] + Path
+ PathStr = ' '.join(PathList)
+ return PathStr
+
+ ## getPkgPath()
+ #
+ # get all package paths.
+ #
+ # @param cls The class pointer
+ #
+ @classmethod
+ def getPkgPath(cls):
+ return cls.PACKAGES_PATH
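+
+# Usage sketch (illustrative, not part of the upstream file; the paths are
+# hypothetical):
+#   MultipleWorkspace.setWs('/ws', '/extra')
+#   MultipleWorkspace.relpath('/ws/MdePkg/MdePkg.dec', '/ws')  ->  'MdePkg/MdePkg.dec'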
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Parsing.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Parsing.py
new file mode 100755
index 00000000..5ae50402
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Parsing.py
@@ -0,0 +1,906 @@
+## @file
+# This file is used to define common parsing-related functions used in the INF/DEC/DSC parsing process
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from .StringUtils import *
+from CommonDataClass.DataClass import *
+from .DataType import *
+
+## ParseDefineMacro2
+#
+# Search the whole table to find all DEFINE macros and replace them with their real values
+#
+def ParseDefineMacro2(Table, RecordSets, GlobalMacro):
+ Macros = {}
+ #
+ # Find all DEFINE macros in the [Header] section and the other sections
+ #
+ SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
+ where Model = %s
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ Macros[Record[0]] = Record[1]
+
+ #
+ # Overridden by Global Macros
+ #
+ Macros.update(GlobalMacro)
+
+ #
+ # Replace the Macros
+ #
+ for Value in (v for v in RecordSets.values() if v):
+ for Item in Value:
+ Item[0] = ReplaceMacro(Item[0], Macros)
+
+## ParseDefineMacro
+#
+# Search the whole table to find all DEFINE macros and replace them with their real values
+#
+def ParseDefineMacro(Table, GlobalMacro):
+ Macros = {}
+ #
+ # Find all DEFINE macros
+ #
+ SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
+ where Model = %s
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+#***************************************************************************************************************************************************
+# The follow SqlCommand (expr replace) is not supported in Sqlite 3.3.4 which is used in Python 2.5 *
+# Reserved Only *
+# SqlCommand = """update %s set Value1 = replace(Value1, '%s', '%s') *
+# where ID in (select ID from %s *
+# where Model = %s *
+# and Value1 like '%%%s%%' *
+# and StartLine > %s *
+# and Enabled > -1 *
+# and Arch = '%s')""" % \ *
+# (self.TblDsc.Table, Record[0], Record[1], self.TblDsc.Table, Record[2], Record[1], Record[3], Record[4]) *
+#***************************************************************************************************************************************************
+ Macros[Record[0]] = Record[1]
+
+ #
+ # Overridden by Global Macros
+ #
+ Macros.update(GlobalMacro)
+
+ #
+ # Find all places the defined macros are used and replace them
+ #
+ SqlCommand = """select ID, Value1 from %s
+ where Model != %s
+ and Value1 like '%%$(%%' and Value1 like '%%)%%'
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
+ FoundRecords = Table.Exec(SqlCommand)
+ for FoundRecord in FoundRecords:
+ NewValue = ReplaceMacro(FoundRecord[1], Macros)
+ SqlCommand = """update %s set Value1 = '%s'
+ where ID = %s""" % (Table.Table, ConvertToSqlString2(NewValue), FoundRecord[0])
+ Table.Exec(SqlCommand)
+
+## QueryDefinesItem
+#
+# Search items of section [Defines] by name and return their values
+#
+# @param Table: The Table to be executed
+# @param Name: The Name of the item in section [Defines]
+# @param Arch: The Arch of the item in section [Defines]
+# @param BelongsToFile: The ID of the file the item belongs to
+#
+# @retval RecordSet: A list of all matched records
+#
+def QueryDefinesItem(Table, Name, Arch, BelongsToFile):
+ SqlCommand = """select Value2 from %s
+ where Model = %s
+ and Value1 = '%s'
+ and Arch = '%s'
+ and BelongsToFile = %s
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(Arch), BelongsToFile)
+ RecordSet = Table.Exec(SqlCommand)
+ if len(RecordSet) < 1:
+ SqlCommand = """select Value2 from %s
+ where Model = %s
+ and Value1 = '%s'
+ and Arch = '%s'
+ and BelongsToFile = %s
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(TAB_ARCH_COMMON.upper()), BelongsToFile)
+ RecordSet = Table.Exec(SqlCommand)
+ if len(RecordSet) == 1:
+ if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
+ return [RecordSet[0][0]]
+ else:
+ return GetSplitValueList(RecordSet[0][0])
+ elif len(RecordSet) < 1:
+ return ['']
+ elif len(RecordSet) > 1:
+ RetVal = []
+ for Record in RecordSet:
+ if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
+ RetVal.append(Record[0])
+ else:
+ Items = GetSplitValueList(Record[0])
+ for Item in Items:
+ RetVal.append(Item)
+ return RetVal
+
+## QueryDefinesItem2
+#
+# Search all items of section [Defines] and return their values
+#
+# @param Table: The Table to be executed
+# @param Arch: The Arch of the items in section [Defines]
+# @param BelongsToFile: The ID of the file the items belong to
+#
+# @retval RecordSet: A list of all matched records
+#
+def QueryDefinesItem2(Table, Arch, BelongsToFile):
+ SqlCommand = """select Value1, Value2, StartLine from %s
+ where Model = %s
+ and Arch = '%s'
+ and BelongsToFile = %s
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Arch), BelongsToFile)
+ RecordSet = Table.Exec(SqlCommand)
+ if len(RecordSet) < 1:
+ SqlCommand = """select Value1, Value2, StartLine from %s
+ where Model = %s
+ and Arch = '%s'
+ and BelongsToFile = %s
+ and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(TAB_ARCH_COMMON), BelongsToFile)
+ RecordSet = Table.Exec(SqlCommand)
+
+ return RecordSet
+
+## QueryDscItem
+#
+# Search all DSC items of a specific section
+#
+# @param Table: The Table to be executed
+# @param Model: The type of section
+# @param BelongsToItem: The item the records belong to
+# @param BelongsToFile: The ID of the file the records belong to
+#
+# @retval RecordSet: A list of all matched records
+#
+def QueryDscItem(Table, Model, BelongsToItem, BelongsToFile):
+ SqlCommand = """select Value1, Arch, StartLine, ID, Value2 from %s
+ where Model = %s
+ and BelongsToItem = %s
+ and BelongsToFile = %s
+ and Enabled > -1""" % (Table.Table, Model, BelongsToItem, BelongsToFile)
+ return Table.Exec(SqlCommand)
+
+## QueryDecItem
+#
+# Search all DEC items of a specific section
+#
+# @param Table: The Table to be executed
+# @param Model: The type of section
+# @param BelongsToItem: The item the records belong to
+#
+# @retval RecordSet: A list of all matched records
+#
+def QueryDecItem(Table, Model, BelongsToItem):
+ SqlCommand = """select Value1, Arch, StartLine, ID, Value2 from %s
+ where Model = %s
+ and BelongsToItem = %s
+ and Enabled > -1""" % (Table.Table, Model, BelongsToItem)
+ return Table.Exec(SqlCommand)
+
+## QueryInfItem
+#
+# Search all INF items of a specific section
+#
+# @param Table: The Table to be executed
+# @param Model: The type of section
+# @param BelongsToItem: The item the records belong to
+#
+# @retval RecordSet: A list of all matched records
+#
+def QueryInfItem(Table, Model, BelongsToItem):
+ SqlCommand = """select Value1, Arch, StartLine, ID, Value2 from %s
+ where Model = %s
+ and BelongsToItem = %s
+ and Enabled > -1""" % (Table.Table, Model, BelongsToItem)
+ return Table.Exec(SqlCommand)
+
+## GetBuildOption
+#
+# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
+# Return (Family, ToolChain, Flag)
+#
+# @param String: String with BuildOption statement
+# @param File: The file which defines build option, used in error report
+#
+# @retval tuple A tuple as (Family, ToolChain, Flag)
+#
+def GetBuildOption(String, File, LineNo = -1):
+ (Family, ToolChain, Flag) = ('', '', '')
+ if String.find(TAB_EQUAL_SPLIT) < 0:
+ RaiseParserError(String, 'BuildOptions', File, '[<Family>:]<ToolFlag>=Flag', LineNo)
+ else:
+ List = GetSplitValueList(String, TAB_EQUAL_SPLIT, MaxSplit = 1)
+ if List[0].find(':') > -1:
+ Family = List[0][ : List[0].find(':')].strip()
+ ToolChain = List[0][List[0].find(':') + 1 : ].strip()
+ else:
+ ToolChain = List[0].strip()
+ Flag = List[1].strip()
+ return (Family, ToolChain, Flag)
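+
+# Usage sketch (illustrative, not part of the upstream file; the file name is
+# hypothetical):
+#   GetBuildOption('MSFT:*_*_*_CC_FLAGS = /W4', 'Platform.dsc')
+#     ->  ('MSFT', '*_*_*_CC_FLAGS', '/W4')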
+
+## Get Library Class
+#
+# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
+#
+# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
+#
+def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo = -1):
+ List = GetSplitValueList(Item[0])
+ SupMod = SUP_MODULE_LIST_STRING
+ if len(List) != 2:
+ RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>|<LibraryInstance>')
+ else:
+ CheckFileType(List[1], '.Inf', ContainerFile, 'library class instance', Item[0], LineNo)
+ CheckFileExist(WorkspaceDir, List[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
+ if Item[1] != '':
+ SupMod = Item[1]
+
+ return (List[0], List[1], SupMod)
+
+## Get Library Class
+#
+# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]
+#
+# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (LibraryClassKeyWord, LibraryInstance, PcdTokenInfo, [SUP_MODULE_LIST]) Formatted Library Item
+#
+def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
+ ItemList = GetSplitValueList((Item[0] + DataType.TAB_VALUE_SPLIT * 2))
+ SupMod = SUP_MODULE_LIST_STRING
+
+ if len(ItemList) > 5:
+ RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]')
+ else:
+ CheckFileType(ItemList[1], '.Inf', ContainerFile, 'LibraryClasses', Item[0], LineNo)
+ CheckFileExist(WorkspaceDir, ItemList[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
+ if ItemList[2] != '':
+ CheckPcdTokenInfo(ItemList[2], 'LibraryClasses', ContainerFile, LineNo)
+ if Item[1] != '':
+ SupMod = Item[1]
+
+ return (ItemList[0], ItemList[1], ItemList[2], SupMod)
+
+## CheckPcdTokenInfo
+#
+# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
+#
+# @param TokenInfoString: String to be checked
+# @param Section: Used for error report
+# @param File: Used for error report
+#
+# @retval True PcdTokenInfo is in correct format
+#
+def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
+ Format = '<TokenSpaceGuidCName>.<PcdCName>'
+ if TokenInfoString != '' and TokenInfoString is not None:
+ TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)
+ if len(TokenInfoList) == 2:
+ return True
+
+ RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
+
+## Get Pcd
+#
+# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
+#
+def GetPcd(Item, Type, ContainerFile, LineNo = -1):
+ TokenGuid, TokenName, Value, MaximumDatumSize, Token = '', '', '', '', ''
+ List = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
+
+ if len(List) < 4 or len(List) > 6:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]', LineNo)
+ else:
+ Value = List[1]
+ MaximumDatumSize = List[2]
+ Token = List[3]
+
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], TAB_SPLIT)
+
+ return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
+
+## Get FeatureFlagPcd
+#
+# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], Type)
+#
+def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo = -1):
+ TokenGuid, TokenName, Value = '', '', ''
+ List = GetSplitValueList(Item)
+ if len(List) != 2:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', LineNo)
+ else:
+ Value = List[1]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, Value, Type)
+
+## Get DynamicDefaultPcd
+#
+# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
+#
+def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo = -1):
+ TokenGuid, TokenName, Value, DatumTyp, MaxDatumSize = '', '', '', '', ''
+ List = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
+ if len(List) < 4 or len(List) > 8:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]', LineNo)
+ else:
+ Value = List[1]
+ DatumTyp = List[2]
+ MaxDatumSize = List[3]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], TAB_SPLIT)
+
+ return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
+
+## Get DynamicHiiPcd
+#
+# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], List[4], List[5], Type)
+#
+def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo = -1):
+ TokenGuid, TokenName, L1, L2, L3, L4, L5 = '', '', '', '', '', '', ''
+ List = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
+ if len(List) < 6 or len(List) > 8:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]', LineNo)
+ else:
+ L1, L2, L3, L4, L5 = List[1], List[2], List[3], List[4], List[5]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, L1, L2, L3, L4, L5, Type)
+
+## Get DynamicVpdPcd
+#
+# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], Type)
+#
+def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo = -1):
+ TokenGuid, TokenName, L1, L2 = '', '', '', ''
+ List = GetSplitValueList(Item + TAB_VALUE_SPLIT)
+ if len(List) < 3 or len(List) > 4:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]', LineNo)
+ else:
+ L1, L2 = List[1], List[2]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, L1, L2, Type)
+
+## GetComponent
+#
+# Parse the block of components defined in a DSC file
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
+#
+# @param Lines: The content to be parsed
+# @param KeyValues: To store data after parsing
+#
+# @retval True Components were parsed successfully
+#
+def GetComponent(Lines, KeyValues):
+ (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
+ ListItem = None
+ LibraryClassItem = []
+ BuildOption = []
+ Pcd = []
+
+ for Line in Lines:
+ Line = Line[0]
+
+ #
+ # Ignore !include statement
+ #
+ if Line.upper().find(TAB_INCLUDE.upper() + ' ') > -1 or Line.upper().find(TAB_DEFINE + ' ') > -1:
+ continue
+
+ if not findBlock:
+ ListItem = Line
+ #
+ # find '{' at line tail
+ #
+ if Line.endswith('{'):
+ findBlock = True
+ ListItem = CleanString(Line.rsplit('{', 1)[0], DataType.TAB_COMMENT_SPLIT)
+
+ #
+ # Parse a block content
+ #
+ if findBlock:
+ if Line.find('<LibraryClasses>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
+ continue
+ if Line.find('<BuildOptions>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
+ continue
+ if Line.find('<PcdsFeatureFlag>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
+ continue
+ if Line.find('<PcdsPatchableInModule>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
+ continue
+ if Line.find('<PcdsFixedAtBuild>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
+ continue
+ if Line.find('<PcdsDynamic>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
+ continue
+ if Line.find('<PcdsDynamicEx>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
+ continue
+ if Line.endswith('}'):
+ #
+ # find '}' at line tail
+ #
+ KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
+ (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
+ LibraryClassItem, BuildOption, Pcd = [], [], []
+ continue
+
+ if findBlock:
+ if findLibraryClass:
+ LibraryClassItem.append(Line)
+ elif findBuildOption:
+ BuildOption.append(Line)
+ elif findPcdsFeatureFlag:
+ Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
+ elif findPcdsPatchableInModule:
+ Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
+ elif findPcdsFixedAtBuild:
+ Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
+ elif findPcdsDynamic:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
+ elif findPcdsDynamicEx:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
+ else:
+ KeyValues.append([ListItem, [], [], []])
+
+ return True
+
+## GetExec
+#
+# Parse a string with format "InfFilename [EXEC = ExecFilename]"
+# Return (InfFilename, ExecFilename)
+#
+# @param String: String with EXEC statement
+#
+# @retval tuple A pair as (InfFilename, ExecFilename)
+#
+def GetExec(String):
+ InfFilename = ''
+ ExecFilename = ''
+ if String.find('EXEC') > -1:
+ InfFilename = String[ : String.find('EXEC')].strip()
+ ExecFilename = String[String.find('EXEC') + len('EXEC') : ].strip()
+ else:
+ InfFilename = String.strip()
+
+ return (InfFilename, ExecFilename)
+
+## GetComponents
+#
+# Parse the block of components defined in a DSC file
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Components were parsed successfully
+#
+def GetComponents(Lines, Key, KeyValues, CommentCharacter):
+ if Lines.find(DataType.TAB_SECTION_END) > -1:
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
+ ListItem = None
+ LibraryClassItem = []
+ BuildOption = []
+ Pcd = []
+
+ LineList = Lines.split('\n')
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line is None or Line == '':
+ continue
+
+ if not findBlock:
+ ListItem = Line
+ #
+ # find '{' at line tail
+ #
+ if Line.endswith('{'):
+ findBlock = True
+ ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)
+
+ #
+ # Parse a block content
+ #
+ if findBlock:
+ if Line.find('<LibraryClasses>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
+ continue
+ if Line.find('<BuildOptions>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
+ continue
+ if Line.find('<PcdsFeatureFlag>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
+ continue
+ if Line.find('<PcdsPatchableInModule>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
+ continue
+ if Line.find('<PcdsFixedAtBuild>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
+ continue
+ if Line.find('<PcdsDynamic>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
+ continue
+ if Line.find('<PcdsDynamicEx>') != -1:
+ (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
+ continue
+ if Line.endswith('}'):
+ #
+ # find '}' at line tail
+ #
+ KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
+ (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
+ LibraryClassItem, BuildOption, Pcd = [], [], []
+ continue
+
+ if findBlock:
+ if findLibraryClass:
+ LibraryClassItem.append(Line)
+ elif findBuildOption:
+ BuildOption.append(Line)
+ elif findPcdsFeatureFlag:
+ Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
+ elif findPcdsPatchableInModule:
+ Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
+ elif findPcdsFixedAtBuild:
+ Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
+ elif findPcdsDynamic:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
+ elif findPcdsDynamicEx:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
+ else:
+ KeyValues.append([ListItem, [], [], []])
+
+ return True
+
+## Get Source
+#
+# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+#
+# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1], List[2], List[3], List[4])
+#
+def GetSource(Item, ContainerFile, FileRelativePath, LineNo = -1):
+ ItemNew = Item + DataType.TAB_VALUE_SPLIT * 4
+ List = GetSplitValueList(ItemNew)
+ if len(List) < 5 or len(List) > 9:
+ RaiseParserError(Item, 'Sources', ContainerFile, '<Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]', LineNo)
+ List[0] = NormPath(List[0])
+ CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Sources', Item, LineNo)
+ if List[4] != '':
+ CheckPcdTokenInfo(List[4], 'Sources', ContainerFile, LineNo)
+
+ return (List[0], List[1], List[2], List[3], List[4])
+
+## Get Binary
+#
+# Get Binary of Inf as <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
+#
+# @param Item: String as <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1], List[2], List[3])
+# @retval List
+#
+def GetBinary(Item, ContainerFile, FileRelativePath, LineNo = -1):
+ ItemNew = Item + DataType.TAB_VALUE_SPLIT
+ List = GetSplitValueList(ItemNew)
+ if len(List) != 4 and len(List) != 5:
+ RaiseParserError(Item, 'Binaries', ContainerFile, "<FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]", LineNo)
+ else:
+ if List[3] != '':
+ CheckPcdTokenInfo(List[3], 'Binaries', ContainerFile, LineNo)
+
+ if len(List) == 4:
+ return (List[0], List[1], List[2], List[3])
+ elif len(List) == 3:
+ return (List[0], List[1], List[2], '')
+ elif len(List) == 2:
+ return (List[0], List[1], '', '')
+ elif len(List) == 1:
+ return (List[0], '', '', '')
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
+def GetGuidsProtocolsPpisOfInf(Item, Type, ContainerFile, LineNo = -1):
+ ItemNew = Item + TAB_VALUE_SPLIT
+ List = GetSplitValueList(ItemNew)
+ if List[1] != '':
+ CheckPcdTokenInfo(List[1], Type, ContainerFile, LineNo)
+
+ return (List[0], List[1])
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
+#
+# @param Item: String as <GuidCName>=<GuidValue>
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
+def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo = -1):
+ List = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
+ if len(List) != 2:
+ RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', LineNo)
+
+ return (List[0], List[1])
+
+## GetPackage
+#
+# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
+def GetPackage(Item, ContainerFile, FileRelativePath, LineNo = -1):
+ ItemNew = Item + TAB_VALUE_SPLIT
+ List = GetSplitValueList(ItemNew)
+ CheckFileType(List[0], '.Dec', ContainerFile, 'package', List[0], LineNo)
+ CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Packages', List[0], LineNo)
+
+ if List[1] != '':
+ CheckPcdTokenInfo(List[1], 'Packages', ContainerFile, LineNo)
+
+ return (List[0], List[1])
+
+## Get Pcd Values of Inf
+#
+# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
+#
+# @param Item: The string describes pcd
+# @param Type: The type of Pcd
+# @param File: The file which describes the pcd, used for error report
+#
+# @retval (TokenSpcCName, TokenCName, Value, ItemType) Formatted Pcd Item
+#
+def GetPcdOfInf(Item, Type, File, LineNo):
+ Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
+ TokenGuid, TokenName, Value, InfType = '', '', '', ''
+
+ if Type == TAB_PCDS_FIXED_AT_BUILD:
+ InfType = TAB_INF_FIXED_PCD
+ elif Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ InfType = TAB_INF_PATCH_PCD
+ elif Type == TAB_PCDS_FEATURE_FLAG:
+ InfType = TAB_INF_FEATURE_PCD
+ elif Type == TAB_PCDS_DYNAMIC_EX:
+ InfType = TAB_INF_PCD_EX
+ elif Type == TAB_PCDS_DYNAMIC:
+ InfType = TAB_INF_PCD
+ List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
+ if len(List) < 2 or len(List) > 3:
+ RaiseParserError(Item, InfType, File, Format, LineNo)
+ else:
+ Value = List[1]
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ if len(TokenInfo) != 2:
+ RaiseParserError(Item, InfType, File, Format, LineNo)
+ else:
+ TokenGuid = TokenInfo[0]
+ TokenName = TokenInfo[1]
+
+ return (TokenGuid, TokenName, Value, Type)
+
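+# Worked example for GetPcdOfInf (gTokenSpaceGuid.PcdFoo is a placeholder):
+#   GetPcdOfInf('gTokenSpaceGuid.PcdFoo|TRUE', TAB_PCDS_FEATURE_FLAG, File, -1)
+#       -> ('gTokenSpaceGuid', 'PcdFoo', 'TRUE', TAB_PCDS_FEATURE_FLAG)
+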
+
+## Get Pcd Values of Dec
+#
+# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+# @retval (TokenSpcCName, TokenCName, Value, DatumType, Token, ItemType) Formatted Pcd Item
+#
+def GetPcdOfDec(Item, Type, File, LineNo = -1):
+ Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
+ TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
+ List = GetSplitValueList(Item)
+ if len(List) != 4:
+ RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
+ else:
+ Value = List[1]
+ DatumType = List[2]
+ Token = List[3]
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ if len(TokenInfo) != 2:
+ RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
+ else:
+ TokenGuid = TokenInfo[0]
+ TokenName = TokenInfo[1]
+
+ return (TokenGuid, TokenName, Value, DatumType, Token, Type)
+
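+# Worked example for GetPcdOfDec (placeholder token values):
+#   GetPcdOfDec('gTokenSpaceGuid.PcdFoo|0x0|UINT32|0x00000001', 'FixedAtBuild', File)
+#       -> ('gTokenSpaceGuid', 'PcdFoo', '0x0', 'UINT32', '0x00000001', 'FixedAtBuild')
+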
+## Parse DEFINE statement
+#
+# Get DEFINE macros
+#
+# 1. Insert a record into the given table
+# Value1: Macro Name
+# Value2: Macro Value
+#
+def ParseDefine(LineValue, StartLine, Table, FileID, Filename, SectionName, SectionModel, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
+ Define = GetSplitValueList(CleanString(LineValue[LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') + len(DataType.TAB_DEFINE + ' ') : ]), TAB_EQUAL_SPLIT, 1)
+ Table.Insert(MODEL_META_DATA_DEFINE, Define[0], Define[1], '', '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+## InsertSectionItems
+#
+# Insert item data of a section to a dict
+#
+def InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, RecordSet):
+ # Insert each item data of a section
+ for Index in range(0, len(ArchList)):
+ Arch = ArchList[Index]
+ Third = ThirdList[Index]
+ if Arch == '':
+ Arch = TAB_ARCH_COMMON
+
+ Records = RecordSet[Model]
+ for SectionItem in SectionItemList:
+ BelongsToItem, EndLine, EndColumn = -1, -1, -1
+ LineValue, StartLine, EndLine, Comment = SectionItem[0], SectionItem[1], SectionItem[1], SectionItem[2]
+
+ EdkLogger.debug(4, "Parsing %s ..." %LineValue)
+            # Skip DEFINE statements here; they are parsed by ParseDefine elsewhere
+            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
+                continue
+
+            # Record all other section items
+ ID = -1
+ Records.append([LineValue, Arch, StartLine, ID, Third, Comment])
+
+ if RecordSet != {}:
+ RecordSet[Model] = Records
+
+## Insert records to database
+#
+# Insert item data of a section to database
+# @param Table: The Table to be inserted
+# @param FileID: The ID of belonging file
+# @param Filename: The name of belonging file
+# @param CurrentSection: The name of current section
+# @param SectionItemList: A list of items of the section
+# @param ArchList: A list of arches
+# @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
+# @param IfDefList: A list of all conditional statements
+# @param RecordSet: A dict of all parsed records
+#
+def InsertSectionItemsIntoDatabase(Table, FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, RecordSet):
+ #
+ # Insert each item data of a section
+ #
+ for Index in range(0, len(ArchList)):
+ Arch = ArchList[Index]
+ Third = ThirdList[Index]
+ if Arch == '':
+ Arch = TAB_ARCH_COMMON
+
+ Records = RecordSet[Model]
+ for SectionItem in SectionItemList:
+ BelongsToItem, EndLine, EndColumn = -1, -1, -1
+ LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]
+
+ EdkLogger.debug(4, "Parsing %s ..." %LineValue)
+ #
+ # And then parse DEFINE statement
+ #
+ if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
+ ParseDefine(LineValue, StartLine, Table, FileID, Filename, CurrentSection, Model, Arch)
+ continue
+
+ #
+ # At last parse other sections
+ #
+ ID = Table.Insert(Model, LineValue, Third, Third, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
+ Records.append([LineValue, Arch, StartLine, ID, Third])
+
+ if RecordSet != {}:
+ RecordSet[Model] = Records
+
+## GenMetaDatSectionItem
+#
+# Append Value to the list stored under Key in the given dict, creating the
+# list on first use
+#
+def GenMetaDatSectionItem(Key, Value, List):
+ if Key not in List:
+ List[Key] = [Value]
+ else:
+ List[Key].append(Value)
+
+## IsValidWord
+#
+# Check whether the word is valid.
+# <Word> ::= (a-zA-Z0-9_)(a-zA-Z0-9_-){0,}
+# Alphanumeric characters with optional dash "-" and/or underscore "_"
+# characters. No whitespace characters are permitted.
+#
+# @param Word: The word string that needs to be checked.
+#
+def IsValidWord(Word):
+    if not Word:
+        return False
+    #
+    # The first char should be alphanumeric or an underscore
+    # (isalnum already covers digits).
+    #
+    if not Word[0].isalnum() and \
+       not Word[0] == '_':
+        return False
+
+ LastChar = ''
+ for Char in Word[1:]:
+ if (not Char.isalpha()) and \
+ (not Char.isdigit()) and \
+ Char != '-' and \
+ Char != '_' and \
+ Char != '.':
+ return False
+ if Char == '.' and LastChar == '.':
+ return False
+ LastChar = Char
+
+ return True
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/RangeExpression.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/RangeExpression.py
new file mode 100755
index 00000000..88a177a4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/RangeExpression.py
@@ -0,0 +1,694 @@
+## @file
+# This file is used to parse and evaluate range expression in Pcd declaration.
+#
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from Common.GlobalData import *
+from CommonDataClass.Exceptions import BadExpression
+from CommonDataClass.Exceptions import WrnExpression
+import uuid
+from Common.Expression import PcdPattern, BaseExpression
+from Common.DataType import *
+from re import compile
+
+ERR_STRING_EXPR = 'This operator cannot be used in string expression: [%s].'
+ERR_SNYTAX = 'Syntax error, the rest of expression cannot be evaluated: [%s].'
+ERR_MATCH = 'No matching right parenthesis.'
+ERR_STRING_TOKEN = 'Bad string token: [%s].'
+ERR_MACRO_TOKEN = 'Bad macro token: [%s].'
+ERR_EMPTY_TOKEN = 'Empty token is not allowed.'
+ERR_PCD_RESOLVE = 'The PCD should be FeatureFlag type or FixedAtBuild type: [%s].'
+ERR_VALID_TOKEN = 'No more valid token found from rest of string: [%s].'
+ERR_EXPR_TYPE = 'Different types found in expression.'
+ERR_OPERATOR_UNSUPPORT = 'Unsupported operator: [%s]'
+ERR_REL_NOT_IN = 'Expect "IN" after "not" operator.'
+WRN_BOOL_EXPR = 'Operand of boolean type cannot be used in arithmetic expression.'
+WRN_EQCMP_STR_OTHERS = '== Comparison between Operand of string type and Boolean/Number Type always return False.'
+WRN_NECMP_STR_OTHERS = '!= Comparison between Operand of string type and Boolean/Number Type always return True.'
+ERR_RELCMP_STR_OTHERS = 'Operator taking Operand of string type and Boolean/Number Type is not allowed: [%s].'
+ERR_STRING_CMP = 'Unicode string and general string cannot be compared: [%s %s %s]'
+ERR_ARRAY_TOKEN = 'Bad C array or C format GUID token: [%s].'
+ERR_ARRAY_ELE = 'This must be HEX value for NList or Array: [%s].'
+ERR_EMPTY_EXPR = 'Empty expression is not allowed.'
+ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOOL_CHAIN_TAG) and $(TARGET).'
+
+class RangeObject(object):
+ def __init__(self, start, end, empty = False):
+
+ if int(start) < int(end):
+ self.start = int(start)
+ self.end = int(end)
+ else:
+ self.start = int(end)
+ self.end = int(start)
+ self.empty = empty
+
+class RangeContainer(object):
+ def __init__(self):
+ self.rangelist = []
+
+ def push(self, RangeObject):
+ self.rangelist.append(RangeObject)
+ self.rangelist = sorted(self.rangelist, key = lambda rangeobj : rangeobj.start)
+ self.merge()
+
+ def pop(self):
+ for item in self.rangelist:
+ yield item
+
+    def __clean__(self):
+        # Drop intervals that were marked empty during merging
+        self.rangelist = [rangeobj for rangeobj in self.rangelist if not rangeobj.empty]
+
+    def merge(self):
+        self.__clean__()
+        for i in range(0, len(self.rangelist) - 1):
+            if self.rangelist[i + 1].start > self.rangelist[i].end:
+                continue
+            # Overlapping or touching intervals: fold this one into the next
+            self.rangelist[i + 1].start = self.rangelist[i].start
+            self.rangelist[i + 1].end = max(self.rangelist[i + 1].end, self.rangelist[i].end)
+            self.rangelist[i].empty = True
+
+        self.__clean__()
+
+ def dump(self):
+ print("----------------------")
+ rangelist = ""
+        for rangeobj in self.rangelist:
+            rangelist = rangelist + "[%d , %d]" % (rangeobj.start, rangeobj.end)
+ print(rangelist)
+
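+# Usage sketch: overlapping intervals are folded together on push.
+#   c = RangeContainer()
+#   c.push(RangeObject(0, 5))
+#   c.push(RangeObject(3, 10))
+#   # c.rangelist now holds the single merged interval [0, 10]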
+
+class XOROperatorObject(object):
+ def __init__(self):
+ pass
+ def Calculate(self, Operand, DataType, SymbolTable):
+ if isinstance(Operand, type('')) and not Operand.isalnum():
+ Expr = "XOR ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
+ rangeId = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(0, int(Operand) - 1))
+ rangeContainer.push(RangeObject(int(Operand) + 1, MAX_VAL_TYPE[DataType]))
+ SymbolTable[rangeId] = rangeContainer
+ return rangeId
+
+class LEOperatorObject(object):
+ def __init__(self):
+ pass
+ def Calculate(self, Operand, DataType, SymbolTable):
+ if isinstance(Operand, type('')) and not Operand.isalnum():
+ Expr = "LE ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
+ rangeId1 = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(0, int(Operand)))
+ SymbolTable[rangeId1] = rangeContainer
+ return rangeId1
+class LTOperatorObject(object):
+ def __init__(self):
+ pass
+ def Calculate(self, Operand, DataType, SymbolTable):
+ if isinstance(Operand, type('')) and not Operand.isalnum():
+ Expr = "LT ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
+ rangeId1 = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(0, int(Operand) - 1))
+ SymbolTable[rangeId1] = rangeContainer
+ return rangeId1
+
+class GEOperatorObject(object):
+ def __init__(self):
+ pass
+ def Calculate(self, Operand, DataType, SymbolTable):
+ if isinstance(Operand, type('')) and not Operand.isalnum():
+ Expr = "GE ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
+ rangeId1 = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(int(Operand), MAX_VAL_TYPE[DataType]))
+ SymbolTable[rangeId1] = rangeContainer
+ return rangeId1
+
+class GTOperatorObject(object):
+ def __init__(self):
+ pass
+ def Calculate(self, Operand, DataType, SymbolTable):
+ if isinstance(Operand, type('')) and not Operand.isalnum():
+ Expr = "GT ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
+ rangeId1 = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(int(Operand) + 1, MAX_VAL_TYPE[DataType]))
+ SymbolTable[rangeId1] = rangeContainer
+ return rangeId1
+
+class EQOperatorObject(object):
+ def __init__(self):
+ pass
+ def Calculate(self, Operand, DataType, SymbolTable):
+ if isinstance(Operand, type('')) and not Operand.isalnum():
+ Expr = "EQ ..."
+ raise BadExpression(ERR_SNYTAX % Expr)
+ rangeId1 = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(int(Operand), int(Operand)))
+ SymbolTable[rangeId1] = rangeContainer
+ return rangeId1
+
+def GetOperatorObject(Operator):
+ if Operator == '>':
+ return GTOperatorObject()
+ elif Operator == '>=':
+ return GEOperatorObject()
+ elif Operator == '<':
+ return LTOperatorObject()
+ elif Operator == '<=':
+ return LEOperatorObject()
+ elif Operator == '==':
+ return EQOperatorObject()
+ elif Operator == '^':
+ return XOROperatorObject()
+ else:
+ raise BadExpression("Bad Operator")
+
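+# Usage sketch (assumes MAX_VAL_TYPE maps 'UINT8' to 0xFF, as in Common.DataType):
+#   SymbolTable = {}
+#   rid = GetOperatorObject('>=').Calculate('2', 'UINT8', SymbolTable)
+#   # SymbolTable[rid] is a RangeContainer holding the interval [2, 255]
+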
+class RangeExpression(BaseExpression):
+ # Logical operator mapping
+ LogicalOperators = {
+ '&&' : 'and', '||' : 'or',
+ '!' : 'not', 'AND': 'and',
+ 'OR' : 'or' , 'NOT': 'not',
+ 'XOR': '^' , 'xor': '^',
+ 'EQ' : '==' , 'NE' : '!=',
+ 'GT' : '>' , 'LT' : '<',
+ 'GE' : '>=' , 'LE' : '<=',
+ 'IN' : 'in'
+ }
+
+ NonLetterOpLst = ['+', '-', '&', '|', '^', '!', '=', '>', '<']
+
+ RangePattern = compile(r'[0-9]+ - [0-9]+')
+
+ def preProcessRangeExpr(self, expr):
+ # convert hex to int
+ # convert interval to object index. ex. 1 - 10 to a GUID
+ expr = expr.strip()
+ NumberDict = {}
+ for HexNumber in gHexPattern.findall(expr):
+ Number = str(int(HexNumber, 16))
+ NumberDict[HexNumber] = Number
+ for HexNum in NumberDict:
+ expr = expr.replace(HexNum, NumberDict[HexNum])
+
+ rangedict = {}
+ for validrange in self.RangePattern.findall(expr):
+ start, end = validrange.split(" - ")
+ start = start.strip()
+ end = end.strip()
+ rangeid = str(uuid.uuid1())
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(start, end))
+ self.operanddict[str(rangeid)] = rangeContainer
+ rangedict[validrange] = str(rangeid)
+
+ for validrange in rangedict:
+ expr = expr.replace(validrange, rangedict[validrange])
+
+ self._Expr = expr
+ return expr
+
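+    # Sketch of the pre-processing above, on a hypothetical expression:
+    #   '0x1 - 0xF'  ->  '1 - 15'  ->  '<uuid>'
+    # where self.operanddict['<uuid>'] is a RangeContainer holding [1, 15].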
+
+ def EvalRange(self, Operator, Oprand):
+
+ operatorobj = GetOperatorObject(Operator)
+ return operatorobj.Calculate(Oprand, self.PcdDataType, self.operanddict)
+
+ def Rangeintersection(self, Oprand1, Oprand2):
+ rangeContainer1 = self.operanddict[Oprand1]
+ rangeContainer2 = self.operanddict[Oprand2]
+ rangeContainer = RangeContainer()
+ for range1 in rangeContainer1.pop():
+ for range2 in rangeContainer2.pop():
+ start1 = range1.start
+ end1 = range1.end
+ start2 = range2.start
+ end2 = range2.end
+ if start1 >= start2:
+ start1, start2 = start2, start1
+ end1, end2 = end2, end1
+ if range1.empty:
+ rangeid = str(uuid.uuid1())
+ rangeContainer.push(RangeObject(0, 0, True))
+ if end1 < start2:
+ rangeid = str(uuid.uuid1())
+ rangeContainer.push(RangeObject(0, 0, True))
+ elif end1 == start2:
+ rangeid = str(uuid.uuid1())
+ rangeContainer.push(RangeObject(end1, end1))
+ elif end1 <= end2 and end1 > start2:
+ rangeid = str(uuid.uuid1())
+ rangeContainer.push(RangeObject(start2, end1))
+ elif end1 >= end2:
+ rangeid = str(uuid.uuid1())
+ rangeContainer.push(RangeObject(start2, end2))
+
+ self.operanddict[rangeid] = rangeContainer
+# rangeContainer.dump()
+ return rangeid
+
+ def Rangecollections(self, Oprand1, Oprand2):
+
+ rangeContainer1 = self.operanddict[Oprand1]
+ rangeContainer2 = self.operanddict[Oprand2]
+ rangeContainer = RangeContainer()
+
+ for rangeobj in rangeContainer2.pop():
+ rangeContainer.push(rangeobj)
+ for rangeobj in rangeContainer1.pop():
+ rangeContainer.push(rangeobj)
+
+ rangeid = str(uuid.uuid1())
+ self.operanddict[rangeid] = rangeContainer
+
+# rangeContainer.dump()
+ return rangeid
+
+
+ def NegativeRange(self, Oprand1):
+ rangeContainer1 = self.operanddict[Oprand1]
+
+
+ rangeids = []
+
+ for rangeobj in rangeContainer1.pop():
+ rangeContainer = RangeContainer()
+ rangeid = str(uuid.uuid1())
+ if rangeobj.empty:
+ rangeContainer.push(RangeObject(0, MAX_VAL_TYPE[self.PcdDataType]))
+ else:
+ if rangeobj.start > 0:
+ rangeContainer.push(RangeObject(0, rangeobj.start - 1))
+ if rangeobj.end < MAX_VAL_TYPE[self.PcdDataType]:
+ rangeContainer.push(RangeObject(rangeobj.end + 1, MAX_VAL_TYPE[self.PcdDataType]))
+ self.operanddict[rangeid] = rangeContainer
+ rangeids.append(rangeid)
+
+ if len(rangeids) == 0:
+ rangeContainer = RangeContainer()
+ rangeContainer.push(RangeObject(0, MAX_VAL_TYPE[self.PcdDataType]))
+ rangeid = str(uuid.uuid1())
+ self.operanddict[rangeid] = rangeContainer
+ return rangeid
+
+ if len(rangeids) == 1:
+ return rangeids[0]
+
+ re = self.Rangeintersection(rangeids[0], rangeids[1])
+ for i in range(2, len(rangeids)):
+ re = self.Rangeintersection(re, rangeids[i])
+
+ rangeid2 = str(uuid.uuid1())
+ self.operanddict[rangeid2] = self.operanddict[re]
+ return rangeid2
+
+ def Eval(self, Operator, Oprand1, Oprand2 = None):
+
+ if Operator in ["!", "NOT", "not"]:
+ if not gGuidPattern.match(Oprand1.strip()):
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+ return self.NegativeRange(Oprand1)
+ else:
+ if Operator in ["==", ">=", "<=", ">", "<", '^']:
+ return self.EvalRange(Operator, Oprand1)
+ elif Operator == 'and' :
+ if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+ return self.Rangeintersection(Oprand1, Oprand2)
+ elif Operator == 'or':
+ if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+ return self.Rangecollections(Oprand1, Oprand2)
+ else:
+ raise BadExpression(ERR_STRING_EXPR % Operator)
+
+
+ def __init__(self, Expression, PcdDataType, SymbolTable = None):
+ if SymbolTable is None:
+ SymbolTable = {}
+        super(RangeExpression, self).__init__(Expression, PcdDataType, SymbolTable)
+ self._NoProcess = False
+ if not isinstance(Expression, type('')):
+ self._Expr = Expression
+ self._NoProcess = True
+ return
+
+ self._Expr = Expression.strip()
+
+ if not self._Expr.strip():
+ raise BadExpression(ERR_EMPTY_EXPR)
+
+ #
+ # The symbol table including PCD and macro mapping
+ #
+ self._Symb = SymbolTable
+ self._Symb.update(self.LogicalOperators)
+ self._Idx = 0
+ self._Len = len(self._Expr)
+ self._Token = ''
+ self._WarnExcept = None
+
+
+ # Literal token without any conversion
+ self._LiteralToken = ''
+
+ # store the operand object
+ self.operanddict = {}
+ # The Pcd max value depends on PcdDataType
+ self.PcdDataType = PcdDataType
+
+ # Public entry for this class
+ # @param RealValue: False: only evaluate if the expression is true or false, used for conditional expression
+ # True : return the evaluated str(value), used for PCD value
+ #
+ # @return: True or False if RealValue is False
+ # Evaluated value of string format if RealValue is True
+ #
+ def __call__(self, RealValue = False, Depth = 0):
+ if self._NoProcess:
+ return self._Expr
+
+ self._Depth = Depth
+
+ self._Expr = self._Expr.strip()
+
+ self.preProcessRangeExpr(self._Expr)
+
+ # check if the expression does not need to evaluate
+ if RealValue and Depth == 0:
+ self._Token = self._Expr
+ if gGuidPatternEnd.match(self._Expr):
+ return [self.operanddict[self._Expr] ]
+
+ self._Idx = 0
+ self._Token = ''
+
+ Val = self._OrExpr()
+ RealVal = Val
+
+ RangeIdList = RealVal.split("or")
+ RangeList = []
+ for rangeid in RangeIdList:
+ RangeList.append(self.operanddict[rangeid.strip()])
+
+ return RangeList
+
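+    # Usage sketch for the entry point above (PcdDataType 'UINT8' assumed):
+    #   expr = RangeExpression('LE 0x5 OR GE 0xA', 'UINT8')
+    #   [container] = expr(True)
+    #   # container.rangelist covers [0, 5] and [10, 255]
+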
+ # Template function to parse binary operators which have same precedence
+ # Expr [Operator Expr]*
+ def _ExprFuncTemplate(self, EvalFunc, OpSet):
+ Val = EvalFunc()
+ while self._IsOperator(OpSet):
+ Op = self._Token
+ try:
+ Val = self.Eval(Op, Val, EvalFunc())
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ Val = Warn.result
+ return Val
+
+ # A [|| B]*
+ def _OrExpr(self):
+ return self._ExprFuncTemplate(self._AndExpr, {"OR", "or"})
+
+ # A [&& B]*
+ def _AndExpr(self):
+ return self._ExprFuncTemplate(self._NeExpr, {"AND", "and"})
+
+ def _NeExpr(self):
+ Val = self._RelExpr()
+ while self._IsOperator({"!=", "NOT", "not"}):
+ Op = self._Token
+ if Op in ["!", "NOT", "not"]:
+ if not self._IsOperator({"IN", "in"}):
+ raise BadExpression(ERR_REL_NOT_IN)
+ Op += ' ' + self._Token
+ try:
+ Val = self.Eval(Op, Val, self._RelExpr())
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ Val = Warn.result
+ return Val
+
+ # [!]*A
+ def _RelExpr(self):
+ if self._IsOperator({"NOT", "LE", "GE", "LT", "GT", "EQ", "XOR"}):
+ Token = self._Token
+ Val = self._NeExpr()
+ try:
+ return self.Eval(Token, Val)
+ except WrnExpression as Warn:
+ self._WarnExcept = Warn
+ return Warn.result
+ return self._IdenExpr()
+
+ # Parse identifier or encapsulated expression
+ def _IdenExpr(self):
+ Tk = self._GetToken()
+ if Tk == '(':
+ Val = self._OrExpr()
+ try:
+ # _GetToken may also raise BadExpression
+ if self._GetToken() != ')':
+ raise BadExpression(ERR_MATCH)
+ except BadExpression:
+ raise BadExpression(ERR_MATCH)
+ return Val
+ return Tk
+
+ # Skip whitespace or tab
+ def __SkipWS(self):
+ for Char in self._Expr[self._Idx:]:
+ if Char not in ' \t':
+ break
+ self._Idx += 1
+
+ # Try to convert string to number
+ def __IsNumberToken(self):
+ Radix = 10
+ if self._Token.lower()[0:2] == '0x' and len(self._Token) > 2:
+ Radix = 16
+ try:
+ self._Token = int(self._Token, Radix)
+ return True
+ except ValueError:
+ return False
+ except TypeError:
+ return False
+
+ # Parse array: {...}
+ def __GetArray(self):
+ Token = '{'
+ self._Idx += 1
+ self.__GetNList(True)
+ Token += self._LiteralToken
+ if self._Idx >= self._Len or self._Expr[self._Idx] != '}':
+ raise BadExpression(ERR_ARRAY_TOKEN % Token)
+ Token += '}'
+
+ # All whitespace and tabs in array are already stripped.
+ IsArray = IsGuid = False
+ if len(Token.split(',')) == 11 and len(Token.split(',{')) == 2 \
+ and len(Token.split('},')) == 1:
+ HexLen = [11, 6, 6, 5, 4, 4, 4, 4, 4, 4, 6]
+ HexList = Token.split(',')
+ if HexList[3].startswith('{') and \
+ not [Index for Index, Hex in enumerate(HexList) if len(Hex) > HexLen[Index]]:
+ IsGuid = True
+ if Token.lstrip('{').rstrip('}').find('{') == -1:
+ if not [Hex for Hex in Token.lstrip('{').rstrip('}').split(',') if len(Hex) > 4]:
+ IsArray = True
+ if not IsArray and not IsGuid:
+ raise BadExpression(ERR_ARRAY_TOKEN % Token)
+ self._Idx += 1
+ self._Token = self._LiteralToken = Token
+ return self._Token
+
+ # Parse string, the format must be: "..."
+ def __GetString(self):
+ Idx = self._Idx
+
+ # Skip left quote
+ self._Idx += 1
+
+ # Replace escape \\\", \"
+ Expr = self._Expr[self._Idx:].replace('\\\\', '//').replace('\\\"', '\\\'')
+ for Ch in Expr:
+ self._Idx += 1
+ if Ch == '"':
+ break
+ self._Token = self._LiteralToken = self._Expr[Idx:self._Idx]
+ if not self._Token.endswith('"'):
+ raise BadExpression(ERR_STRING_TOKEN % self._Token)
+ self._Token = self._Token[1:-1]
+ return self._Token
+
+ # Get token that is comprised by alphanumeric, underscore or dot(used by PCD)
+ # @param IsAlphaOp: Indicate if parsing general token or script operator(EQ, NE...)
+ def __GetIdToken(self, IsAlphaOp = False):
+ IdToken = ''
+ for Ch in self._Expr[self._Idx:]:
+ if not self.__IsIdChar(Ch):
+ break
+ self._Idx += 1
+ IdToken += Ch
+
+ self._Token = self._LiteralToken = IdToken
+ if not IsAlphaOp:
+ self.__ResolveToken()
+ return self._Token
+
+ # Try to resolve token
+ def __ResolveToken(self):
+ if not self._Token:
+ raise BadExpression(ERR_EMPTY_TOKEN)
+
+ # PCD token
+ if PcdPattern.match(self._Token):
+ if self._Token not in self._Symb:
+ Ex = BadExpression(ERR_PCD_RESOLVE % self._Token)
+ Ex.Pcd = self._Token
+ raise Ex
+            self._Token = RangeExpression(self._Symb[self._Token], self.PcdDataType, self._Symb)(True, self._Depth + 1)
+ if not isinstance(self._Token, type('')):
+ self._LiteralToken = hex(self._Token)
+ return
+
+ if self._Token.startswith('"'):
+ self._Token = self._Token[1:-1]
+ elif self._Token in ["FALSE", "false", "False"]:
+ self._Token = False
+ elif self._Token in ["TRUE", "true", "True"]:
+ self._Token = True
+ else:
+ self.__IsNumberToken()
+
+ def __GetNList(self, InArray = False):
+ self._GetSingleToken()
+ if not self.__IsHexLiteral():
+ if InArray:
+ raise BadExpression(ERR_ARRAY_ELE % self._Token)
+ return self._Token
+
+ self.__SkipWS()
+ Expr = self._Expr[self._Idx:]
+ if not Expr.startswith(','):
+ return self._Token
+
+ NList = self._LiteralToken
+ while Expr.startswith(','):
+ NList += ','
+ self._Idx += 1
+ self.__SkipWS()
+ self._GetSingleToken()
+ if not self.__IsHexLiteral():
+ raise BadExpression(ERR_ARRAY_ELE % self._Token)
+ NList += self._LiteralToken
+ self.__SkipWS()
+ Expr = self._Expr[self._Idx:]
+ self._Token = self._LiteralToken = NList
+ return self._Token
+
+ def __IsHexLiteral(self):
+ if self._LiteralToken.startswith('{') and \
+ self._LiteralToken.endswith('}'):
+ return True
+
+ if gHexPattern.match(self._LiteralToken):
+ Token = self._LiteralToken[2:]
+ Token = Token.lstrip('0')
+ if not Token:
+ self._LiteralToken = '0x0'
+ else:
+ self._LiteralToken = '0x' + Token.lower()
+ return True
+ return False
+
+ def _GetToken(self):
+ return self.__GetNList()
+
+ @staticmethod
+ def __IsIdChar(Ch):
+ return Ch in '._/:' or Ch.isalnum()
+
+ # Parse operand
+ def _GetSingleToken(self):
+ self.__SkipWS()
+ Expr = self._Expr[self._Idx:]
+ if Expr.startswith('L"'):
+ # Skip L
+ self._Idx += 1
+ UStr = self.__GetString()
+ self._Token = 'L"' + UStr + '"'
+ return self._Token
+
+ self._Token = ''
+ if Expr:
+ Ch = Expr[0]
+ Match = gGuidPattern.match(Expr)
+ if Match and not Expr[Match.end():Match.end() + 1].isalnum() \
+ and Expr[Match.end():Match.end() + 1] != '_':
+ self._Idx += Match.end()
+ self._Token = Expr[0:Match.end()]
+ return self._Token
+ elif self.__IsIdChar(Ch):
+ return self.__GetIdToken()
+ elif Ch == '(' or Ch == ')':
+ self._Idx += 1
+ self._Token = Ch
+ return self._Token
+
+ raise BadExpression(ERR_VALID_TOKEN % Expr)
+
+ # Parse operator
+ def _GetOperator(self):
+ self.__SkipWS()
+ LegalOpLst = ['&&', '||', '!=', '==', '>=', '<='] + self.NonLetterOpLst
+
+ self._Token = ''
+ Expr = self._Expr[self._Idx:]
+
+ # Reach end of expression
+ if not Expr:
+ return ''
+
+ # Script operator: LT, GT, LE, GE, EQ, NE, and, or, xor, not
+ if Expr[0].isalpha():
+ return self.__GetIdToken(True)
+
+ # Start to get regular operator: +, -, <, > ...
+ if Expr[0] not in self.NonLetterOpLst:
+ return ''
+
+ OpToken = ''
+ for Ch in Expr:
+ if Ch in self.NonLetterOpLst:
+ if '!' == Ch and OpToken:
+ break
+ self._Idx += 1
+ OpToken += Ch
+ else:
+ break
+
+ if OpToken not in LegalOpLst:
+ raise BadExpression(ERR_OPERATOR_UNSUPPORT % OpToken)
+ self._Token = OpToken
+ return OpToken
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/StringUtils.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/StringUtils.py
new file mode 100755
index 00000000..904c39c9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/StringUtils.py
@@ -0,0 +1,873 @@
+## @file
+# This file is used to define common string related functions used in parsing process
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import re
+from . import DataType
+import Common.LongFilePathOs as os
+import string
+from . import EdkLogger as EdkLogger
+
+from . import GlobalData
+from .BuildToolError import *
+from CommonDataClass.Exceptions import *
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+gHexVerPatt = re.compile('0x[a-f0-9]{4}[a-f0-9]{4}$', re.IGNORECASE)
+gHumanReadableVerPatt = re.compile(r'([1-9][0-9]*|0)\.[0-9]{1,2}$')
+
+## GetSplitValueList
+#
+# Get a value list from a string with multiple values split with SplitTag
+# The default SplitTag is DataType.TAB_VALUE_SPLIT
+# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
+#
+# @param String: The input string to be split
+# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
+# @param MaxSplit: The max number of split values, default is -1 (no limit)
+#
+# @retval list() A list of the split strings
+#
+def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
+ ValueList = []
+ Last = 0
+ Escaped = False
+ InSingleQuoteString = False
+ InDoubleQuoteString = False
+ InParenthesis = 0
+ for Index in range(0, len(String)):
+ Char = String[Index]
+
+ if not Escaped:
+            # Found a splitter that is not inside a quoted string or
+            # parentheses, so split here
+            if not InSingleQuoteString and not InDoubleQuoteString and InParenthesis == 0 and Char == SplitTag:
+ ValueList.append(String[Last:Index].strip())
+ Last = Index + 1
+ if MaxSplit > 0 and len(ValueList) >= MaxSplit:
+ break
+
+ if Char == '\\' and (InSingleQuoteString or InDoubleQuoteString):
+ Escaped = True
+ elif Char == '"' and not InSingleQuoteString:
+ if not InDoubleQuoteString:
+ InDoubleQuoteString = True
+ else:
+ InDoubleQuoteString = False
+ elif Char == "'" and not InDoubleQuoteString:
+ if not InSingleQuoteString:
+ InSingleQuoteString = True
+ else:
+ InSingleQuoteString = False
+ elif Char == '(':
+ InParenthesis = InParenthesis + 1
+ elif Char == ')':
+ InParenthesis = InParenthesis - 1
+ else:
+ Escaped = False
+
+ if Last < len(String):
+ ValueList.append(String[Last:].strip())
+ elif Last == len(String):
+ ValueList.append('')
+
+ return ValueList
+
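+# Worked examples (quote and parenthesis handling, per the rules above):
+#   GetSplitValueList('A|"B|C"|D')     -> ['A', '"B|C"', 'D']
+#   GetSplitValueList('F(A|B)|C')      -> ['F(A|B)', 'C']
+#   GetSplitValueList('a=b=c', '=', 1) -> ['a', 'b=c']
+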
+## GetSplitList
+#
+# Get a value list from a string with multiple values split with SplitStr
+# The default SplitStr is DataType.TAB_VALUE_SPLIT
+# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
+#
+# @param String: The input string to be split
+# @param SplitStr: The split key, default is DataType.TAB_VALUE_SPLIT
+# @param MaxSplit: The max number of split values, default is -1 (no limit)
+#
+# @retval list() A list of the split strings
+#
+def GetSplitList(String, SplitStr=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
+ return list(map(lambda l: l.strip(), String.split(SplitStr, MaxSplit)))
+
+## MergeArches
+#
+# Append Arch to the list of arches already recorded for Key
+# If Key is not present yet, create the list with Arch as its only element
+#
+# @param Dict: The input value for Dict
+# @param Key: The input value for Key
+# @param Arch: The Arch to be added or merged
+#
+def MergeArches(Dict, Key, Arch):
+ if Key in Dict:
+ Dict[Key].append(Arch)
+ else:
+ Dict[Key] = Arch.split()
+
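+# Worked example:
+#   Dict = {}
+#   MergeArches(Dict, 'BaseLib', 'IA32')   # Dict == {'BaseLib': ['IA32']}
+#   MergeArches(Dict, 'BaseLib', 'X64')    # Dict == {'BaseLib': ['IA32', 'X64']}
+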
+## GenDefines
+#
+# Parse a string with format "DEFINE <VarName> = <PATH>"
+# Generate a map Defines[VarName] = PATH
+# Return False if invalid format
+#
+# @param String: String with DEFINE statement
+# @param Arch: Supported Arch
+# @param Defines: DEFINE statement to be parsed
+#
+# @retval 0 DEFINE statement found, and valid
+# @retval -1 DEFINE statement found, but not valid
+# @retval 1 DEFINE statement not found
+#
+def GenDefines(String, Arch, Defines):
+ if String.find(DataType.TAB_DEFINE + ' ') > -1:
+ List = String.replace(DataType.TAB_DEFINE + ' ', '').split(DataType.TAB_EQUAL_SPLIT)
+ if len(List) == 2:
+ Defines[(CleanString(List[0]), Arch)] = CleanString(List[1])
+ return 0
+ else:
+ return -1
+
+ return 1
+
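+# Worked example (MDE_PKG is a placeholder name):
+#   Defines = {}
+#   GenDefines('DEFINE MDE_PKG = MdePkg', 'COMMON', Defines)   # returns 0
+#   # Defines == {('MDE_PKG', 'COMMON'): 'MdePkg'}
+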
+## GenInclude
+#
+# Parse a string with format "!include <Filename>"
+# Record the include file and its arch in IncludeFiles
+#
+# @param String: String with INCLUDE statement
+# @param IncludeFiles: Dict collecting include file -> arch list mappings
+# @param Arch: Supported Arch
+#
+# @retval True INCLUDE statement found and recorded
+# @retval False No INCLUDE statement in String
+#
+def GenInclude(String, IncludeFiles, Arch):
+ if String.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1:
+ IncludeFile = CleanString(String[String.upper().find(DataType.TAB_INCLUDE.upper() + ' ') + len(DataType.TAB_INCLUDE + ' ') : ])
+ MergeArches(IncludeFiles, IncludeFile, Arch)
+ return True
+ else:
+ return False
+
+## GetLibraryClassesWithModuleType
+#
+# Get Library Class definition when no module type defined
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get library classes successfully
+#
+def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
+ newKey = SplitModuleType(Key)
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ LineList = Lines.splitlines()
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line != '' and Line[0] != CommentCharacter:
+ KeyValues.append([CleanString(Line, CommentCharacter), newKey[1]])
+
+ return True
+
+## GetDynamics
+#
+# Get Dynamic Pcds
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get Dynamic Pcds successfully
+#
+def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
+ #
+ # Get SkuId Name List
+ #
+ SkuIdNameList = SplitModuleType(Key)
+
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ LineList = Lines.splitlines()
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line != '' and Line[0] != CommentCharacter:
+ KeyValues.append([CleanString(Line, CommentCharacter), SkuIdNameList[1]])
+
+ return True
+
+## SplitModuleType
+#
+# Split ModuleType out of section define to get key
+# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [ 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
+#
+# @param Key: String to be parsed
+#
+# @retval ReturnValue A list for module types
+#
+def SplitModuleType(Key):
+ KeyList = Key.split(DataType.TAB_SPLIT)
+ #
+ # Fill in for arch
+ #
+ KeyList.append('')
+ #
+ # Fill in for moduletype
+ #
+ KeyList.append('')
+ ReturnValue = []
+ KeyValue = KeyList[0]
+ if KeyList[1] != '':
+ KeyValue = KeyValue + DataType.TAB_SPLIT + KeyList[1]
+ ReturnValue.append(KeyValue)
+ ReturnValue.append(GetSplitValueList(KeyList[2]))
+
+ return ReturnValue
+
+## Replace macro in strings list
+#
+# This method replaces macros used in a given string list. The macros are
+# given in a dictionary.
+#
+# @param StringList StringList to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement Whether to replace an undefined macro with ''
+#
+# @retval NewList A new string list whose macros are replaced
+#
+def ReplaceMacros(StringList, MacroDefinitions=None, SelfReplacement=False):
+ NewList = []
+ if MacroDefinitions is None:
+ MacroDefinitions = {}
+ for String in StringList:
+ if isinstance(String, type('')):
+ NewList.append(ReplaceMacro(String, MacroDefinitions, SelfReplacement))
+ else:
+ NewList.append(String)
+
+ return NewList
+
+## Replace macro in string
+#
+# This method replaces macros used in a given string. The macros are given in
+# a dictionary.
+#
+# @param String String to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement Whether to replace an undefined macro with ''
+# @param RaiseError Whether to raise SymbolNotFound when a macro is undefined
+#
+# @retval string The string whose macros are replaced
+#
+def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, RaiseError=False):
+ LastString = String
+ if MacroDefinitions is None:
+ MacroDefinitions = {}
+ while String and MacroDefinitions:
+ MacroUsed = GlobalData.gMacroRefPattern.findall(String)
+ # no macro found in String, stop replacing
+ if len(MacroUsed) == 0:
+ break
+
+ for Macro in MacroUsed:
+ if Macro not in MacroDefinitions:
+ if RaiseError:
+ raise SymbolNotFound("%s not defined" % Macro)
+ if SelfReplacement:
+ String = String.replace("$(%s)" % Macro, '')
+ continue
+ if "$(%s)" % Macro not in MacroDefinitions[Macro]:
+ String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
+ # in case there's macro not defined
+ if String == LastString:
+ break
+ LastString = String
+
+ return String
+
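+# Worked examples (macro names are placeholders):
+#   ReplaceMacro('$(WORKSPACE)/MdePkg', {'WORKSPACE': '/ws'})        -> '/ws/MdePkg'
+#   ReplaceMacro('$(UNDEFINED)/x', {'A': 'y'}, SelfReplacement=True) -> '/x'
+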
+## NormPath
+#
+# Create a normal path
+# And replace DEFINE in the path
+#
+# @param Path: The input value for Path to be converted
+# @param Defines: A set for DEFINE statement
+#
+# @retval Path Formatted path
+#
+def NormPath(Path, Defines=None):
+ IsRelativePath = False
+ if Path:
+ if Path[0] == '.':
+ IsRelativePath = True
+ #
+ # Replace with Define
+ #
+ if Defines:
+ Path = ReplaceMacro(Path, Defines)
+ #
+ # To local path format
+ #
+ Path = os.path.normpath(Path)
+ if Path.startswith(GlobalData.gWorkspace) and not Path.startswith(GlobalData.gBuildDirectory) and not os.path.exists(Path):
+ Path = Path[len (GlobalData.gWorkspace):]
+ if Path[0] == os.path.sep:
+ Path = Path[1:]
+ Path = mws.join(GlobalData.gWorkspace, Path)
+
+ if IsRelativePath and Path[0] != '.':
+ Path = os.path.join('.', Path)
+
+ return Path
+
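+# Worked examples (POSIX-style separators shown; output follows the host OS,
+# and the paths are assumed to lie outside the workspace):
+#   NormPath('./Include/../Library')         -> './Library'
+#   NormPath('$(ARCH)/Lib', {'ARCH': 'X64'}) -> 'X64/Lib'
+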
+## CleanString
+#
+# Remove comments in a string
+# Remove spaces
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
+#
+# @retval Line The cleaned line, with comments and surrounding whitespace removed
+#
+def CleanString(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False, BuildOption=False):
+ #
+ # remove whitespace
+ #
+    Line = Line.strip()
+ #
+ # Replace Edk's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(DataType.TAB_COMMENT_EDK_SPLIT, CommentCharacter)
+ #
+ # remove comments, but we should escape comment character in string
+ #
+ InDoubleQuoteString = False
+ InSingleQuoteString = False
+ CommentInString = False
+ for Index in range(0, len(Line)):
+ if Line[Index] == '"' and not InSingleQuoteString:
+ InDoubleQuoteString = not InDoubleQuoteString
+ elif Line[Index] == "'" and not InDoubleQuoteString:
+ InSingleQuoteString = not InSingleQuoteString
+ elif Line[Index] == CommentCharacter and (InSingleQuoteString or InDoubleQuoteString):
+ CommentInString = True
+ elif Line[Index] == CommentCharacter and not (InSingleQuoteString or InDoubleQuoteString):
+ Line = Line[0: Index]
+ break
+
+ if CommentInString and BuildOption:
+ Line = Line.replace('"', '')
+ ChIndex = Line.find('#')
+ while ChIndex >= 0:
+ if GlobalData.gIsWindows:
+ if ChIndex == 0 or Line[ChIndex - 1] != '^':
+ Line = Line[0:ChIndex] + '^' + Line[ChIndex:]
+ ChIndex = Line.find('#', ChIndex + 2)
+ else:
+ ChIndex = Line.find('#', ChIndex + 1)
+ else:
+ if ChIndex == 0 or Line[ChIndex - 1] != '\\':
+ Line = Line[0:ChIndex] + '\\' + Line[ChIndex:]
+ ChIndex = Line.find('#', ChIndex + 2)
+ else:
+ ChIndex = Line.find('#', ChIndex + 1)
+ #
+ # remove whitespace again
+ #
+    Line = Line.strip()
+
+ return Line
+
+## CleanString2
+#
+# Split statement with comments in a string
+# Remove spaces
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
+#
+# @retval (Line, Comment) The statement and its comment, both stripped
+#
+def CleanString2(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+    Line = Line.strip()
+ #
+ # Replace Edk's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(DataType.TAB_COMMENT_EDK_SPLIT, CommentCharacter)
+ #
+ # separate comments and statements, but we should escape comment character in string
+ #
+ InDoubleQuoteString = False
+ InSingleQuoteString = False
+ CommentInString = False
+ Comment = ''
+ for Index in range(0, len(Line)):
+ if Line[Index] == '"' and not InSingleQuoteString:
+ InDoubleQuoteString = not InDoubleQuoteString
+ elif Line[Index] == "'" and not InDoubleQuoteString:
+ InSingleQuoteString = not InSingleQuoteString
+ elif Line[Index] == CommentCharacter and (InDoubleQuoteString or InSingleQuoteString):
+ CommentInString = True
+ elif Line[Index] == CommentCharacter and not (InDoubleQuoteString or InSingleQuoteString):
+ Comment = Line[Index:].strip()
+ Line = Line[0:Index].strip()
+ break
+
+ return Line, Comment
+
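+# Worked example:
+#   CleanString2('gUartGuid = {0x1, 0x2}  # comment')
+#       -> ('gUartGuid = {0x1, 0x2}', '# comment')
+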
+## GetMultipleValuesOfKeyFromLines
+#
+# Parse multiple strings to clean comment and spaces
+# The result is saved to KeyValues
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Successfully executed
+#
+def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ LineList = Lines.split('\n')
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line != '' and Line[0] != CommentCharacter:
+ KeyValues.append(Line)
+
+ return True
+
+## GetDefineValue
+#
+# Parse a DEFINE statement to get defined value
+# DEFINE Key Value
+#
+# @param String: The content to be parsed
+# @param Key: The key of DEFINE statement
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval string The defined value
+#
+def GetDefineValue(String, Key, CommentCharacter):
+ String = CleanString(String)
+ return String[String.find(Key + ' ') + len(Key + ' ') : ]
+
+## GetHexVerValue
+#
+# Get a Hex Version Value
+#
+# @param VerString: The version string to be parsed
+#
+#
+# @retval: If VerString is incorrectly formatted, return "None" which will break the build.
+# If VerString is correctly formatted, return a Hex value of the Version Number (0xmmmmnnnn)
+# where mmmm is the major number and nnnn is the adjusted minor number.
+#
+def GetHexVerValue(VerString):
+ VerString = CleanString(VerString)
+
+ if gHumanReadableVerPatt.match(VerString):
+ ValueList = VerString.split('.')
+ Major = ValueList[0]
+ Minor = ValueList[1]
+ if len(Minor) == 1:
+ Minor += '0'
+        DeciValue = (int(Major) << 16) + int(Minor)
+ return "0x%08x" % DeciValue
+ elif gHexVerPatt.match(VerString):
+ return VerString
+ else:
+ return None
+
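+# Worked examples (note the decimal padding of a one-digit minor number):
+#   GetHexVerValue('1.2')        -> '0x00010014'   # minor '2' is read as '20'
+#   GetHexVerValue('0x00010002') -> '0x00010002'
+#   GetHexVerValue('banana')     -> None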
+
+## GetSingleValueOfKeyFromLines
+#
+# Parse multiple strings as below to get value of each definition line
+# Key1 = Value1
+# Key2 = Value2
+# The result is saved to Dictionary
+#
+# @param Lines: The content to be parsed
+# @param Dictionary: To store data after parsing
+# @param CommentCharacter: Comment char, be used to ignore comment content
+# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+# @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
+# @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
+#
+# @retval True Successfully executed
+#
+def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
+ Lines = Lines.split('\n')
+ Keys = []
+ Value = ''
+ DefineValues = ['']
+ SpecValues = ['']
+
+ for Line in Lines:
+ #
+ # Handle DEFINE and SPEC
+ #
+ if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
+ if '' in DefineValues:
+ DefineValues.remove('')
+ DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
+ continue
+ if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
+ if '' in SpecValues:
+ SpecValues.remove('')
+ SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
+ continue
+
+ #
+ # Handle Others
+ #
+ LineList = Line.split(KeySplitCharacter, 1)
+ if len(LineList) >= 2:
+ Key = LineList[0].split()
+ if len(Key) == 1 and Key[0][0] != CommentCharacter:
+ #
+ # Remove comments and white spaces
+ #
+ LineList[1] = CleanString(LineList[1], CommentCharacter)
+ if ValueSplitFlag:
+                    # str.strip: the old string.strip() helper no longer exists in Python 3
+                    Value = list(map(str.strip, LineList[1].split(ValueSplitCharacter)))
+ else:
+ Value = CleanString(LineList[1], CommentCharacter).splitlines()
+
+ if Key[0] in Dictionary:
+ if Key[0] not in Keys:
+ Dictionary[Key[0]] = Value
+ Keys.append(Key[0])
+ else:
+ Dictionary[Key[0]].extend(Value)
+ else:
+ Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]
+
+ if DefineValues == []:
+ DefineValues = ['']
+ if SpecValues == []:
+ SpecValues = ['']
+ Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
+ Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues
+
+ return True
+
+## PreCheck
+#
+# Do pre-check for a file before it is parsed
+# Check $()
+# Check []
+#
+# @param FileName: Used for error report
+# @param FileContent: File content to be parsed
+# @param SupSectionTag: Used for error report
+#
+def PreCheck(FileName, FileContent, SupSectionTag):
+ LineNo = 0
+ IsFailed = False
+ NewFileContent = ''
+ for Line in FileContent.splitlines():
+ LineNo = LineNo + 1
+ #
+ # Clean current line
+ #
+ Line = CleanString(Line)
+
+ #
+ # Remove commented line
+ #
+ if Line.find(DataType.TAB_COMMA_SPLIT) == 0:
+ Line = ''
+ #
+ # Check $()
+ #
+ if Line.find('$') > -1:
+ if Line.find('$(') < 0 or Line.find(')') < 0:
+ EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
+
+ #
+ # Check []
+ #
+ if Line.find('[') > -1 or Line.find(']') > -1:
+ #
+ # Only get one '[' or one ']'
+ #
+ if not (Line.find('[') > -1 and Line.find(']') > -1):
+ EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
+
+ #
+ # Regenerate FileContent
+ #
+ NewFileContent = NewFileContent + Line + '\r\n'
+
+ if IsFailed:
+ EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
+
+ return NewFileContent
+
+## CheckFileType
+#
+# Check whether the Filename has the extension ExtName
+# Return True if it does
+# Report a parser error if it does not
+#
+# @param CheckFilename: Name of the file to be checked
+# @param ExtName: Ext name of the file to be checked
+# @param ContainerFilename: The container file which describes the file to be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file to be checked
+#
+# @retval True The file type is correct
+#
+def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
+ if CheckFilename != '' and CheckFilename is not None:
+ (Root, Ext) = os.path.splitext(CheckFilename)
+ if Ext.upper() != ExtName.upper():
+ ContainerFile = open(ContainerFilename, 'r').read()
+ if LineNo == -1:
+ LineNo = GetLineNo(ContainerFile, Line)
+ ErrorMsg = "Invalid %s. '%s' is found, but '%s' file is needed" % (SectionName, CheckFilename, ExtName)
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo,
+ File=ContainerFilename, RaiseError=EdkLogger.IsRaiseError)
+
+ return True
+
+## CheckFileExist
+#
+# Check if the file exists
+# Return the file's full path if it exists
+# Report a parser error if it does not
+#
+# @param CheckFilename: Name of the file to be checked
+# @param WorkspaceDir: Current workspace dir
+# @param ContainerFilename: The container file which describes the file to be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file to be checked
+#
+# @retval The file full path if the file exists
+#
+def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
+ CheckFile = ''
+ if CheckFilename != '' and CheckFilename is not None:
+ CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
+ if not os.path.isfile(CheckFile):
+ ContainerFile = open(ContainerFilename, 'r').read()
+ if LineNo == -1:
+ LineNo = GetLineNo(ContainerFile, Line)
+ ErrorMsg = "Can't find file '%s' defined in section '%s'" % (CheckFile, SectionName)
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg,
+ File=ContainerFilename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError)
+
+ return CheckFile
+
+## GetLineNo
+#
+# Find the index of a line in a file
+#
+# @param FileContent: Search scope
+# @param Line: Search key
+#
+# @retval int Index of the line
+# @retval -1 The line is not found
+#
+def GetLineNo(FileContent, Line, IsIgnoreComment=True):
+ LineList = FileContent.splitlines()
+ for Index in range(len(LineList)):
+ if LineList[Index].find(Line) > -1:
+ #
+ # Ignore statement in comment
+ #
+ if IsIgnoreComment:
+ if LineList[Index].strip()[0] == DataType.TAB_COMMENT_SPLIT:
+ continue
+ return Index + 1
+
+ return -1
+
+## RaiseParserError
+#
+# Raise a parser error
+#
+# @param Line: String which has error
+# @param Section: Used for error report
+# @param File: File which has the string
+# @param Format: Correct format
+#
+def RaiseParserError(Line, Section, File, Format='', LineNo= -1):
+ if LineNo == -1:
+ LineNo = GetLineNo(open(os.path.normpath(File), 'r').read(), Line)
+ ErrorMsg = "Invalid statement '%s' is found in section '%s'" % (Line, Section)
+ if Format != '':
+ Format = "Correct format is " + Format
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=File, Line=LineNo, ExtraData=Format, RaiseError=EdkLogger.IsRaiseError)
+
+## WorkspaceFile
+#
+# Return a full path with workspace dir
+#
+# @param WorkspaceDir: Workspace dir
+# @param Filename: Relative file name
+#
+# @retval string A full path
+#
+def WorkspaceFile(WorkspaceDir, Filename):
+ return mws.join(NormPath(WorkspaceDir), NormPath(Filename))
+
+## SplitString
+#
+# Remove a leading and/or trailing '"' from String
+#
+# @param String: The string to be processed
+#
+# @retval String: The string with surrounding '"' removed
+#
+def SplitString(String):
+ if String.startswith('\"'):
+ String = String[1:]
+ if String.endswith('\"'):
+ String = String[:-1]
+
+ return String
+
+## Convert To Sql String
+#
+# 1. Replace "'" with "''" in each item of StringList
+#
+# @param StringList: A list for strings to be converted
+#
+def ConvertToSqlString(StringList):
+ return list(map(lambda s: s.replace("'", "''"), StringList))
+
+## Convert To Sql String
+#
+# 1. Replace "'" with "''" in the String
+#
+# @param String: A String to be converted
+#
+def ConvertToSqlString2(String):
+ return String.replace("'", "''")
+
+#
+# Remove comment block
+#
+def RemoveBlockComment(Lines):
+ IsFindBlockComment = False
+ IsFindBlockCode = False
+ ReservedLine = ''
+ NewLines = []
+
+ for Line in Lines:
+ Line = Line.strip()
+ #
+ # Remove comment block
+ #
+ if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
+ ReservedLine = GetSplitList(Line, DataType.TAB_COMMENT_EDK_START, 1)[0]
+ IsFindBlockComment = True
+ if Line.find(DataType.TAB_COMMENT_EDK_END) > -1:
+ Line = ReservedLine + GetSplitList(Line, DataType.TAB_COMMENT_EDK_END, 1)[1]
+ ReservedLine = ''
+ IsFindBlockComment = False
+ if IsFindBlockComment:
+ NewLines.append('')
+ continue
+
+ NewLines.append(Line)
+ return NewLines
+
+#
+# Get String of a List
+#
+def GetStringOfList(List, Split=' '):
+ if not isinstance(List, type([])):
+ return List
+ Str = ''
+ for Item in List:
+ Str = Str + Item + Split
+
+ return Str.strip()
+
+#
+# Get HelpTextList from HelpTextClassList
+#
+def GetHelpTextList(HelpTextClassList):
+ List = []
+ if HelpTextClassList:
+ for HelpText in HelpTextClassList:
+ if HelpText.String.endswith('\n'):
+ HelpText.String = HelpText.String[0: len(HelpText.String) - len('\n')]
+ List.extend(HelpText.String.split('\n'))
+
+ return List
+
+def StringToArray(String):
+ if String.startswith('L"'):
+ if String == "L\"\"":
+ return "{0x00,0x00}"
+ else:
+ return "{%s,0x00,0x00}" % ",".join("0x%02x,0x00" % ord(C) for C in String[2:-1])
+ elif String.startswith('"'):
+ if String == "\"\"":
+ return "{0x00,0x00}"
+ else:
+ StringLen = len(String[1:-1])
+ if StringLen % 2:
+ return "{%s,0x00}" % ",".join("0x%02x" % ord(C) for C in String[1:-1])
+ else:
+ return "{%s,0x00,0x00}" % ",".join("0x%02x" % ord(C) for C in String[1:-1])
+ elif String.startswith('{'):
+ return "{%s}" % ",".join(C.strip() for C in String[1:-1].split(','))
+ else:
+ if len(String.split()) % 2:
+ return '{%s,0}' % ','.join(String.split())
+ else:
+ return '{%s,0,0}' % ','.join(String.split())
+
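+# Worked examples (even-length payloads gain two NUL bytes for UCS-2 alignment):
+#   StringToArray('"AB"')  -> '{0x41,0x42,0x00,0x00}'
+#   StringToArray('L"A"')  -> '{0x41,0x00,0x00,0x00}'
+#   StringToArray('1 2 3') -> '{1,2,3,0}'
+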
+def StringArrayLength(String):
+ if String.startswith('L"'):
+ return (len(String) - 3 + 1) * 2
+ elif String.startswith('"'):
+ return (len(String) - 2 + 1)
+ else:
+ return len(String.split()) + 1
+
+def RemoveDupOption(OptionString, Which="/I", Against=None):
+ OptionList = OptionString.split()
+ ValueList = []
+ if Against:
+ ValueList += Against
+ for Index in range(len(OptionList)):
+ Opt = OptionList[Index]
+ if not Opt.startswith(Which):
+ continue
+ if len(Opt) > len(Which):
+ Val = Opt[len(Which):]
+ else:
+ Val = ""
+ if Val in ValueList:
+ OptionList[Index] = ""
+ else:
+ ValueList.append(Val)
+ return " ".join(OptionList)
+
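+# Worked example: the second '/Ifoo' is blanked out, which can leave an extra
+# space in the joined result.
+#   RemoveDupOption('/Ifoo /Ibar /Ifoo') -> '/Ifoo /Ibar '
+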
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/TargetTxtClassObject.py
new file mode 100755
index 00000000..ff2e4be9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/TargetTxtClassObject.py
@@ -0,0 +1,199 @@
+## @file
+# This file is used to define each component of Target.txt file
+#
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+
+import Common.GlobalData as GlobalData
+import Common.LongFilePathOs as os
+from . import EdkLogger
+from . import DataType
+from .BuildToolError import *
+
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+gDefaultTargetTxtFile = "target.txt"
+
+## TargetTxtClassObject
+#
+# This class defined content used in file target.txt
+#
+# @param object: Inherited from object class
+# @param Filename: Input value for full path of target.txt
+#
+# @var TargetTxtDictionary: To store keys and values defined in target.txt
+#
+class TargetTxtClassObject(object):
+ def __init__(self, Filename = None):
+ self.TargetTxtDictionary = {
+ DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM : '',
+ DataType.TAB_TAT_DEFINES_ACTIVE_MODULE : '',
+ DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF : '',
+ DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : '',
+ DataType.TAB_TAT_DEFINES_TARGET : [],
+ DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG : [],
+ DataType.TAB_TAT_DEFINES_TARGET_ARCH : [],
+ DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
+ }
+ self.ConfDirectoryPath = ""
+ if Filename is not None:
+ self.LoadTargetTxtFile(Filename)
+
+ ## LoadTargetTxtFile
+ #
+    # Load target.txt file and parse it; keys and values are stored in
+    # self.TargetTxtDictionary
+    #
+    # @param Filename: Input value for full path of target.txt
+    #
+    # @retval 0 Loaded and parsed successfully
+    # @retval 1 The file does not exist
+ #
+ def LoadTargetTxtFile(self, Filename):
+ if os.path.exists(Filename) and os.path.isfile(Filename):
+ return self.ConvertTextFileToDict(Filename, '#', '=')
+ else:
+ EdkLogger.error("Target.txt Parser", FILE_NOT_FOUND, ExtraData=Filename)
+ return 1
+
+ ## ConvertTextFileToDict
+ #
+ # Convert a text file to a dictionary of (name:value) pairs.
+ # The data is saved to self.TargetTxtDictionary
+ #
+ # @param FileName: Text filename
+ # @param CommentCharacter: Comment char, be used to ignore comment content
+ # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+ #
+ # @retval 0 Convert successfully
+ # @retval 1 Open file failed
+ #
+ def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
+        F = None
+        try:
+            F = open(FileName, 'r')
+            self.ConfDirectoryPath = os.path.dirname(FileName)
+        except:
+            # Close the handle (if open succeeded) before reporting the fatal error
+            if F is not None:
+                F.close()
+            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
+
+ for Line in F:
+ Line = Line.strip()
+ if Line.startswith(CommentCharacter) or Line == '':
+ continue
+
+ LineList = Line.split(KeySplitCharacter, 1)
+ Key = LineList[0].strip()
+ if len(LineList) == 2:
+ Value = LineList[1].strip()
+ else:
+ Value = ""
+
+ if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
+ DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
+ self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
+ if Key == DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.TargetTxtDictionary[Key]:
+ if self.TargetTxtDictionary[Key].startswith("Conf/"):
+ Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ if not os.path.exists(Tools_Def) or not os.path.isfile(Tools_Def):
+ # If Conf/Conf does not exist, try just the Conf/ directory
+ Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
+ else:
+ # The File pointed to by TOOL_CHAIN_CONF is not in a Conf/ directory
+ Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ self.TargetTxtDictionary[Key] = Tools_Def
+ if Key == DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF and self.TargetTxtDictionary[Key]:
+ if self.TargetTxtDictionary[Key].startswith("Conf/"):
+ Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ if not os.path.exists(Build_Rule) or not os.path.isfile(Build_Rule):
+                        # If <ConfDir>/Conf/<file> does not exist, strip the leading 'Conf/' and look in <ConfDir> directly
+ Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
+ else:
+ # The File pointed to by BUILD_RULE_CONF is not in a Conf/ directory
+ Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
+ self.TargetTxtDictionary[Key] = Build_Rule
+ elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
+ DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
+ self.TargetTxtDictionary[Key] = Value.split()
+ elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
+ try:
+ V = int(Value, 0)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value),
+ File=FileName)
+ self.TargetTxtDictionary[Key] = Value
+ #elif Key not in GlobalData.gGlobalDefines:
+ # GlobalData.gGlobalDefines[Key] = Value
+
+ F.close()
+ return 0
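+
+    # Illustrative example (hypothetical paths): given a target.txt containing
+    #   ACTIVE_PLATFORM = MdeModulePkg/MdeModulePkg.dsc
+    #   TARGET          = DEBUG RELEASE
+    # this method leaves the dictionary populated as
+    #   TargetTxtDictionary[TAB_TAT_DEFINES_ACTIVE_PLATFORM] == 'MdeModulePkg/MdeModulePkg.dsc'
+    #   TargetTxtDictionary[TAB_TAT_DEFINES_TARGET] == ['DEBUG', 'RELEASE']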
+
+## TargetTxtDict
+#
+# Singleton helper that lazily loads target.txt from the Conf directory
+# (taken from the --conf option, CONF_PATH, or WORKSPACE/Conf)
+#
+# @var Target: An instance of TargetTxtClassObject() with loaded target.txt
+#
+
+class TargetTxtDict():
+
+ def __new__(cls, *args, **kw):
+ if not hasattr(cls, '_instance'):
+ orig = super(TargetTxtDict, cls)
+ cls._instance = orig.__new__(cls, *args, **kw)
+ return cls._instance
+
+    def __init__(self):
+        if not hasattr(self, 'TxtTarget'):
+            self.TxtTarget = None
+
+ @property
+ def Target(self):
+ if not self.TxtTarget:
+ self._GetTarget()
+ return self.TxtTarget
+
+ def _GetTarget(self):
+ Target = TargetTxtClassObject()
+ ConfDirectory = GlobalData.gCmdConfDir
+ if ConfDirectory:
+ # Get alternate Conf location, if it is absolute, then just use the absolute directory name
+ ConfDirectoryPath = os.path.normpath(ConfDirectory)
+
+ if not os.path.isabs(ConfDirectoryPath):
+ # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
+ # This also handles someone specifying the Conf directory in the workspace. Using --conf=Conf
+ ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], ConfDirectoryPath)
+ else:
+ if "CONF_PATH" in os.environ:
+ ConfDirectoryPath = os.path.normcase(os.path.normpath(os.environ["CONF_PATH"]))
+ else:
+ # Get standard WORKSPACE/Conf use the absolute path to the WORKSPACE/Conf
+ ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], 'Conf')
+ GlobalData.gConfDirectory = ConfDirectoryPath
+ targettxt = os.path.normpath(os.path.join(ConfDirectoryPath, gDefaultTargetTxtFile))
+ if os.path.exists(targettxt):
+ Target.LoadTargetTxtFile(targettxt)
+ self.TxtTarget = Target
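+
+    # Usage sketch: TargetTxtDict is a singleton, so every caller shares one
+    # lazily-loaded copy of target.txt, e.g.
+    #   TargetObj = TargetTxtDict()
+    #   Target = TargetObj.Target   # first access triggers _GetTarget()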
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    TargetObj = TargetTxtDict()
+    Target = TargetObj.Target
+    print(Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER])
+    print(Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET])
+    print(Target.TargetTxtDictionary)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/ToolDefClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/ToolDefClassObject.py
new file mode 100755
index 00000000..2319a7c4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/ToolDefClassObject.py
@@ -0,0 +1,290 @@
+## @file
+# This file is used to define each component of tools_def.txt file
+#
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import re
+from . import EdkLogger
+
+from .BuildToolError import *
+from Common.TargetTxtClassObject import TargetTxtDict
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.Misc import PathClass
+from Common.StringUtils import NormPath
+import Common.GlobalData as GlobalData
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from .DataType import TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, \
+    TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE, \
+    TAB_TOD_DEFINES_FAMILY, TAB_TOD_DEFINES_BUILDRULEFAMILY, \
+    TAB_STAR, TAB_TAT_DEFINES_TOOL_CHAIN_CONF
+
+
+##
+# Static variables used for pattern
+#
+gMacroRefPattern = re.compile(r'(DEF\([^()]+\))')
+gEnvRefPattern = re.compile(r'(ENV\([^()]+\))')
+gMacroDefPattern = re.compile(r"DEFINE\s+(\S+)")
+gDefaultToolsDefFile = "tools_def.txt"
+
+## ToolDefClassObject
+#
+# This class defines the content of the tools_def.txt file
+#
+# @param object: Inherited from object class
+# @param Filename: Input value for full path of tools_def.txt
+#
+# @var ToolsDefTxtDictionary: To store keys and values defined in tools_def.txt
+# @var MacroDictionary: To store keys and values defined in DEFINE statement
+#
+class ToolDefClassObject(object):
+ def __init__(self, FileName=None):
+ self.ToolsDefTxtDictionary = {}
+ self.MacroDictionary = {}
+ for Env in os.environ:
+ self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]
+
+ if FileName is not None:
+ self.LoadToolDefFile(FileName)
+
+ ## LoadToolDefFile
+ #
+    # Load the tools_def.txt file and parse it
+ #
+ # @param Filename: Input value for full path of tools_def.txt
+ #
+ def LoadToolDefFile(self, FileName):
+ # set multiple workspace
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(GlobalData.gWorkspace, PackagesPath)
+
+ self.ToolsDefTxtDatabase = {
+ TAB_TOD_DEFINES_TARGET : [],
+ TAB_TOD_DEFINES_TOOL_CHAIN_TAG : [],
+ TAB_TOD_DEFINES_TARGET_ARCH : [],
+ TAB_TOD_DEFINES_COMMAND_TYPE : []
+ }
+
+ self.IncludeToolDefFile(FileName)
+
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
+
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))
+
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()
+
+ ## IncludeToolDefFile
+ #
+    # Load an included tools_def.txt file and parse it as if its contents were inside the main file
+ #
+ # @param Filename: Input value for full path of tools_def.txt
+ #
+ def IncludeToolDefFile(self, FileName):
+ FileContent = []
+ if os.path.isfile(FileName):
+ try:
+ F = open(FileName, 'r')
+ FileContent = F.readlines()
+ except:
+ EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
+ else:
+ EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)
+
+ for Index in range(len(FileContent)):
+ Line = FileContent[Index].strip()
+ if Line == "" or Line[0] == '#':
+ continue
+
+ if Line.startswith("!include"):
+ IncFile = Line[8:].strip()
+ Done, IncFile = self.ExpandMacros(IncFile)
+ if not Done:
+ EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
+ "Macro or Environment has not been defined",
+ ExtraData=IncFile[4:-1], File=FileName, Line=Index+1)
+ IncFile = NormPath(IncFile)
+
+ if not os.path.isabs(IncFile):
+ #
+ # try WORKSPACE
+ #
+ IncFileTmp = PathClass(IncFile, GlobalData.gWorkspace)
+ ErrorCode = IncFileTmp.Validate()[0]
+ if ErrorCode != 0:
+ #
+ # try PACKAGES_PATH
+ #
+ IncFileTmp = mws.join(GlobalData.gWorkspace, IncFile)
+ if not os.path.exists(IncFileTmp):
+ #
+ # try directory of current file
+ #
+ IncFileTmp = PathClass(IncFile, os.path.dirname(FileName))
+ ErrorCode = IncFileTmp.Validate()[0]
+ if ErrorCode != 0:
+ EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=IncFile)
+
+ if isinstance(IncFileTmp, PathClass):
+ IncFile = IncFileTmp.Path
+ else:
+ IncFile = IncFileTmp
+
+ self.IncludeToolDefFile(IncFile)
+ continue
+
+ NameValuePair = Line.split("=", 1)
+ if len(NameValuePair) != 2:
+                EdkLogger.warn("tools_def.txt parser", "Line %d: not a correct assignment statement, skipped" % (Index + 1))
+ continue
+
+ Name = NameValuePair[0].strip()
+ Value = NameValuePair[1].strip()
+
+ if Name == "IDENTIFIER":
+ EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
+ continue
+
+ MacroDefinition = gMacroDefPattern.findall(Name)
+ if MacroDefinition != []:
+ Done, Value = self.ExpandMacros(Value)
+ if not Done:
+ EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
+ "Macro or Environment has not been defined",
+ ExtraData=Value[4:-1], File=FileName, Line=Index+1)
+
+ MacroName = MacroDefinition[0].strip()
+ self.MacroDictionary["DEF(%s)" % MacroName] = Value
+ EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
+ continue
+
+ Done, Value = self.ExpandMacros(Value)
+ if not Done:
+ EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
+ "Macro or Environment has not been defined",
+ ExtraData=Value[4:-1], File=FileName, Line=Index+1)
+
+ List = Name.split('_')
+ if len(List) != 5:
+ EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
+ continue
+ elif List[4] == TAB_STAR:
+ EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
+ continue
+ else:
+ self.ToolsDefTxtDictionary[Name] = Value
+ if List[0] != TAB_STAR:
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
+ if List[1] != TAB_STAR:
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
+ if List[2] != TAB_STAR:
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
+ if List[3] != TAB_STAR:
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]
+ if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == TAB_STAR and List[3] == TAB_STAR:
+ if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
+ elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
+ elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
+ EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))
+ if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == TAB_STAR and List[3] == TAB_STAR:
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
+ or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
+ EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
+ self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
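+
+    # Illustrative example (hypothetical tool chain): a definition line such as
+    #   DEBUG_VS2019_X64_CC_FLAGS = /nologo DEF(VS2019_BIN)
+    # splits on '_' into [TARGET, TOOL_CHAIN_TAG, TARGET_ARCH, COMMAND_TYPE, ATTRIBUTE]
+    # and, after macro expansion, is stored as
+    #   ToolsDefTxtDictionary['DEBUG_VS2019_X64_CC_FLAGS']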
+
+ ## ExpandMacros
+ #
+ # Replace defined macros with real value
+ #
+ # @param Value: The string with unreplaced macros
+ #
+ # @retval Value: The string which has been replaced with real value
+ #
+ def ExpandMacros(self, Value):
+        # On Windows, os.environ keys are all upper case, so the ENV() keys in
+        # self.MacroDictionary are upper case as well, while the reference in the
+        # file may not be; fall back to the upper-cased key when needed.
+ EnvReference = gEnvRefPattern.findall(Value)
+ for Ref in EnvReference:
+ if Ref not in self.MacroDictionary and Ref.upper() not in self.MacroDictionary:
+ Value = Value.replace(Ref, "")
+ else:
+ if Ref in self.MacroDictionary:
+ Value = Value.replace(Ref, self.MacroDictionary[Ref])
+ else:
+ Value = Value.replace(Ref, self.MacroDictionary[Ref.upper()])
+ MacroReference = gMacroRefPattern.findall(Value)
+ for Ref in MacroReference:
+ if Ref not in self.MacroDictionary:
+ return False, Ref
+ Value = Value.replace(Ref, self.MacroDictionary[Ref])
+
+ return True, Value
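+
+    # Illustrative example (hypothetical macro values): with
+    #   MacroDictionary == {'DEF(VS_BIN)': 'C:/VS/bin', 'ENV(WORKSPACE)': '/ws'}
+    # ExpandMacros('DEF(VS_BIN)/cl.exe') returns (True, 'C:/VS/bin/cl.exe'),
+    # while a reference to an undefined DEF(FOO) returns (False, 'DEF(FOO)').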
+
+## ToolDefDict
+#
+# Singleton helper that lazily loads tools_def.txt from the given Conf dir
+#
+# @param ConfDir: Conf dir
+#
+# @var ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
+#
+
+
+class ToolDefDict():
+
+ def __new__(cls, ConfDir, *args, **kw):
+ if not hasattr(cls, '_instance'):
+ orig = super(ToolDefDict, cls)
+ cls._instance = orig.__new__(cls, *args, **kw)
+ return cls._instance
+
+    def __init__(self, ConfDir):
+        self.ConfDir = ConfDir
+        if not hasattr(self, '_ToolDef'):
+            self._ToolDef = None
+
+ @property
+ def ToolDef(self):
+ if not self._ToolDef:
+ self._GetToolDef()
+ return self._ToolDef
+
+ def _GetToolDef(self):
+ TargetObj = TargetTxtDict()
+ Target = TargetObj.Target
+ ToolDef = ToolDefClassObject()
+ if TAB_TAT_DEFINES_TOOL_CHAIN_CONF in Target.TargetTxtDictionary:
+ ToolsDefFile = Target.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
+ if ToolsDefFile:
+ ToolDef.LoadToolDefFile(os.path.normpath(ToolsDefFile))
+ else:
+ ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(self.ConfDir, gDefaultToolsDefFile)))
+ else:
+ ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(self.ConfDir, gDefaultToolsDefFile)))
+ self._ToolDef = ToolDef
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    ToolDef = ToolDefDict(os.path.join(os.getenv("WORKSPACE", '.'), 'Conf'))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/CapsuleDependency.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/CapsuleDependency.py
new file mode 100755
index 00000000..96cf4b40
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/CapsuleDependency.py
@@ -0,0 +1,409 @@
+## @file
+# Module that encodes and decodes a capsule dependency.
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+import struct
+import json
+import sys
+import uuid
+import re
+
+'''
+CapsuleDependency
+'''
+
+class OpConvert (object):
+ def __init__ (self):
+ # Opcode: (OperandSize, PackSize, PackFmt, EncodeConvert, DecodeConvert)
+ self._DepexOperations = {0x00: (16, 16, 's', self.Str2Guid, self.Guid2Str),
+ 0x01: (4, 1, 'I', self.Str2Uint, self.Uint2Str),
+ 0x02: (1, 0, 's', self.Str2Utf8, self.Byte2Str),
+ }
+
+ def Str2Uint (self, Data):
+ try:
+ Value = int (Data, 16)
+ except:
+ Message = '{Data} is not a valid integer value.'.format (Data = Data)
+ raise ValueError (Message)
+ if Value < 0 or Value > 0xFFFFFFFF:
+            Message = '{Data} is not a UINT32.'.format (Data = Data)
+ raise ValueError (Message)
+ return Value
+
+ def Uint2Str (self, Data):
+ if Data < 0 or Data > 0xFFFFFFFF:
+            Message = '{Data} is not a UINT32.'.format (Data = Data)
+ raise ValueError (Message)
+ return "0x{Data:08x}".format (Data = Data)
+
+ def Str2Guid (self, Data):
+ try:
+ Guid = uuid.UUID (Data)
+ except:
+ Message = '{Data} is not a valid registry format GUID value.'.format (Data = Data)
+ raise ValueError (Message)
+ return Guid.bytes_le
+
+ def Guid2Str (self, Data):
+ try:
+ Guid = uuid.UUID (bytes_le = Data)
+ except:
+ Message = '{Data} is not a valid binary format GUID value.'.format (Data = Data)
+ raise ValueError (Message)
+ return str (Guid).upper ()
+
+ def Str2Utf8 (self, Data):
+ if isinstance (Data, str):
+ return Data.encode ('utf-8')
+ else:
+ Message = '{Data} is not a valid string.'.format (Data = Data)
+ raise ValueError (Message)
+
+ def Byte2Str (self, Data):
+ if isinstance (Data, bytes):
+ if Data[-1:] == b'\x00':
+ return str (Data[:-1], 'utf-8')
+ else:
+ return str (Data, 'utf-8')
+ else:
+ Message = '{Data} is not a valid binary string.'.format (Data = Data)
+ raise ValueError (Message)
+
+ def OpEncode (self, Opcode, Operand = None):
+ BinTemp = struct.pack ('<b', Opcode)
+        if Opcode <= 0x02 and Operand is not None:
+ OperandSize, PackSize, PackFmt, EncodeConvert, DecodeConvert = self._DepexOperations[Opcode]
+ Value = EncodeConvert (Operand)
+ if Opcode == 0x02:
+ PackSize = len (Value) + 1
+ BinTemp += struct.pack ('<{PackSize}{PackFmt}'.format (PackSize = PackSize, PackFmt = PackFmt), Value)
+ return BinTemp
+
+ def OpDecode (self, Buffer):
+ Opcode = struct.unpack ('<b', Buffer[0:1])[0]
+ if Opcode <= 0x02:
+ OperandSize, PackSize, PackFmt, EncodeConvert, DecodeConvert = self._DepexOperations[Opcode]
+ if Opcode == 0x02:
+ try:
+ PackSize = Buffer[1:].index (b'\x00') + 1
+ OperandSize = PackSize
+ except:
+ Message = 'CapsuleDependency: OpConvert: error: decode failed with wrong opcode/string.'
+ raise ValueError (Message)
+ try:
+ Operand = DecodeConvert (struct.unpack ('<{PackSize}{PackFmt}'.format (PackSize = PackSize, PackFmt = PackFmt), Buffer[1:1+OperandSize])[0])
+ except:
+ Message = 'CapsuleDependency: OpConvert: error: decode failed with unpack failure.'
+ raise ValueError (Message)
+ else:
+ Operand = None
+ OperandSize = 0
+ return (Opcode, Operand, OperandSize)
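+
+    # Illustrative round trip: OpEncode (0x01, '0x00000002') packs the opcode
+    # byte followed by a little-endian UINT32, and OpDecode on that buffer
+    # returns (0x01, '0x00000002', 4).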
+
+class CapsuleDependencyClass (object):
+ # //**************************************************************
+ # // Image Attribute - Dependency
+ # //**************************************************************
+ # typedef struct {
+ # UINT8 Dependencies[];
+ # } EFI_FIRMWARE_IMAGE_DEP
+
+ # {expression operator : [precedence, opcode, type (1:unary/2:binocular)]}
+ _opReference = {'&&': [2, 0x03, 2],
+ '||': [1, 0x04, 2],
+ '~': [5, 0x05, 1],
+ '==': [3, 0x08, 2],
+ '>': [4, 0x09, 2],
+ '>=': [4, 0x0A, 2],
+ '<': [4, 0x0B, 2],
+ '<=': [4, 0x0C, 2],
+ }
+
+ def __init__ (self):
+ self.Payload = b''
+ self._DepexExp = None
+ self._DepexList = []
+ self._DepexDump = []
+ self.Depex = b''
+ self._Valid = False
+ self._DepexSize = 0
+ self._opReferenceReverse = {v[1] : k for k, v in self._opReference.items ()}
+ self.OpConverter = OpConvert ()
+
+ @property
+ def DepexExp (self):
+ return self._DepexExp
+
+ @DepexExp.setter
+ def DepexExp (self, DepexExp = ''):
+ if isinstance (DepexExp, str):
+ DepexExp = re.sub (r'\n',r' ',DepexExp)
+ DepexExp = re.sub (r'\(',r' ( ',DepexExp)
+ DepexExp = re.sub (r'\)',r' ) ',DepexExp)
+ DepexExp = re.sub (r'~',r' ~ ',DepexExp)
+ self._DepexList = re.findall(r"[^\s\"\']+|\"[^\"]*\"|\'[^\']*\'",DepexExp)
+ self._DepexExp = " ".join(self._DepexList)
+
+ else:
+            Msg = 'Input Depex Expression is not a valid string.'
+ raise ValueError (Msg)
+
+ def IsValidOperator (self, op):
+ return op in self._opReference.keys ()
+
+ def IsValidUnaryOperator (self, op):
+ return op in self._opReference.keys () and self._opReference[op][2] == 1
+
+ def IsValidBinocularOperator (self, op):
+ return op in self._opReference.keys () and self._opReference[op][2] == 2
+
+ def IsValidGuid (self, operand):
+ try:
+ uuid.UUID (operand)
+ except:
+ return False
+ return True
+
+ def IsValidVersion (self, operand):
+ try:
+ Value = int (operand, 16)
+ if Value < 0 or Value > 0xFFFFFFFF:
+ return False
+ except:
+ return False
+ return True
+
+ def IsValidBoolean (self, operand):
+ try:
+ return operand.upper () in ['TRUE', 'FALSE']
+ except:
+ return False
+
+ def IsValidOperand (self, operand):
+ return self.IsValidVersion (operand) or self.IsValidGuid (operand) or self.IsValidBoolean (operand)
+
+ def IsValidString (self, operand):
+ return operand[0] == "\"" and operand[-1] == "\"" and len(operand) >= 2
+
+    # Return True if the priority of the current operator is not greater than that of the previous one
+ def PriorityNotGreater (self, prevOp, currOp):
+ return self._opReference[currOp][0] <= self._opReference[prevOp][0]
+
+ def ValidateDepex (self):
+ OpList = self._DepexList
+
+ i = 0
+ while i < len (OpList):
+ Op = OpList[i]
+
+ if Op == 'DECLARE':
+ i += 1
+ if i >= len (OpList):
+                    Msg = 'No operand after {Op}.'.format (Op = OpList[i-1])
+ raise IndexError (Msg)
+ # Check valid string
+ if not self.IsValidString(OpList[i]):
+ Msg = '{Operand} after {Op} is not a valid expression input.'.format (Operand = OpList[i], Op = OpList[i-1])
+ raise ValueError (Msg)
+
+ elif Op == '(':
+ # Expression cannot end with (
+ if i == len (OpList) - 1:
+ Msg = 'Expression cannot end with \'(\''
+ raise ValueError (Msg)
+                # The op after '(' cannot be a binocular operator
+ if self.IsValidBinocularOperator (OpList[i+1]) :
+ Msg = '{Op} after \'(\' is not a valid expression input.'.format (Op = OpList[i+1])
+ raise ValueError (Msg)
+
+ elif Op == ')':
+ # Expression cannot start with )
+ if i == 0:
+ Msg = 'Expression cannot start with \')\''
+ raise ValueError (Msg)
+                # The op before ')' cannot be an operator
+ if self.IsValidOperator (OpList[i-1]):
+ Msg = '{Op} before \')\' is not a valid expression input.'.format (Op = OpList[i-1])
+ raise ValueError (Msg)
+                # The op after ')' cannot be an operand or a unary operator
+ if (i + 1) < len (OpList) and (self.IsValidOperand (OpList[i+1]) or self.IsValidUnaryOperator (OpList[i+1])):
+ Msg = '{Op} after \')\' is not a valid expression input.'.format (Op = OpList[i+1])
+ raise ValueError (Msg)
+
+ elif self.IsValidOperand (Op):
+                # An operand cannot be followed by another operand or a unary operator
+ if (i + 1) < len (OpList) and (self.IsValidOperand (OpList[i+1]) or self.IsValidUnaryOperator (OpList[i+1])):
+ Msg = '{Op} after {PrevOp} is not a valid expression input.'.format (Op = OpList[i+1], PrevOp = Op)
+ raise ValueError (Msg)
+
+ elif self.IsValidOperator (Op):
+                # An operator cannot be followed by a binocular operator
+ if (i + 1) < len (OpList) and self.IsValidBinocularOperator (OpList[i+1]):
+ Msg = '{Op} after {PrevOp} is not a valid expression input.'.format (Op = OpList[i+1], PrevOp = Op)
+ raise ValueError (Msg)
+                # The first op cannot be a binocular operator
+ if i == 0 and self.IsValidBinocularOperator (Op):
+ Msg = 'Expression cannot start with an operator {Op}.'.format (Op = Op)
+ raise ValueError (Msg)
+                # The last op cannot be an operator
+ if i == len (OpList) - 1:
+                    Msg = 'Expression cannot end with an operator {Op}.'.format (Op = Op)
+ raise ValueError (Msg)
+                # A unary operator cannot be followed by a guid / version operand
+ if self.IsValidUnaryOperator (Op) and (self.IsValidGuid (OpList[i+1]) or self.IsValidVersion (OpList[i+1])):
+ Msg = '{Op} after {PrevOp} is not a valid expression input.'.format (Op = OpList[i+1], PrevOp = Op)
+ raise ValueError (Msg)
+
+ else:
+ Msg = '{Op} is not a valid expression input.'.format (Op = Op)
+ raise ValueError (Msg)
+ i += 1
+
+    def Encode (self):
+        # initialize
+        self.Depex = b''
+        self._DepexDump = []
+        OperandStack = []
+        OperatorStack = []
+        OpList = self._DepexList
+
+        self.ValidateDepex ()
+
+        # convert
+        i = 0
+        while i < len (OpList):
+            Op = OpList[i]
+            if Op == 'DECLARE':
+                # This declares that the next expression value is a VERSION_STRING
+                i += 1
+                self.Depex += self.OpConverter.OpEncode (0x02, OpList[i][1:-1])
+
+            elif Op == '(':
+                OperatorStack.append (Op)
+
+            elif Op == ')':
+                while (OperatorStack and OperatorStack[-1] != '('):
+                    Operator = OperatorStack.pop ()
+                    self.Depex += self.OpConverter.OpEncode (self._opReference[Operator][1])
+                try:
+                    OperatorStack.pop () # pop out '('
+                except:
+                    Msg = 'Pop out \'(\' failed, too many \')\''
+                    raise ValueError (Msg)
+
+            elif self.IsValidGuid (Op):
+                if not OperandStack:
+                    OperandStack.append (self.OpConverter.OpEncode (0x00, Op))
+                else:
+                    # according to UEFI Spec 2.8, the guid/version operands are stored in reversed order for firmware comparison.
+                    self.Depex += self.OpConverter.OpEncode (0x00, Op)
+                    self.Depex += OperandStack.pop ()
+
+            elif self.IsValidVersion (Op):
+                if not OperandStack:
+                    OperandStack.append (self.OpConverter.OpEncode (0x01, Op))
+                else:
+                    # according to UEFI Spec 2.8, the guid/version operands are stored in reversed order for firmware comparison.
+                    self.Depex += self.OpConverter.OpEncode (0x01, Op)
+                    self.Depex += OperandStack.pop ()
+
+            elif self.IsValidBoolean (Op):
+                if Op.upper () == 'FALSE':
+                    self.Depex += self.OpConverter.OpEncode (0x07)
+                elif Op.upper () == 'TRUE':
+                    self.Depex += self.OpConverter.OpEncode (0x06)
+
+            elif self.IsValidOperator (Op):
+                while (OperatorStack and OperatorStack[-1] != '(' and self.PriorityNotGreater (OperatorStack[-1], Op)):
+                    Operator = OperatorStack.pop ()
+                    self.Depex += self.OpConverter.OpEncode (self._opReference[Operator][1])
+                OperatorStack.append (Op)
+
+            i += 1
+
+        while OperatorStack:
+            Operator = OperatorStack.pop ()
+            if Operator == '(':
+                Msg = 'Too many \'(\'.'
+                raise ValueError (Msg)
+            self.Depex += self.OpConverter.OpEncode (self._opReference[Operator][1])
+        self.Depex += self.OpConverter.OpEncode (0x0D)
+
+        self._Valid = True
+        self._DepexSize = len (self.Depex)
+        return self.Depex + self.Payload
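+
+    # Usage sketch (hypothetical GUID/version): the infix expression is turned
+    # into the postfix binary form by the shunting-yard loop above, e.g.
+    #   Dep = CapsuleDependencyClass ()
+    #   Dep.DepexExp = '25e0ae30-9e8c-4a36-b7d5-8c2ff3d0b4a5 >= 0x00000002'
+    #   Blob = Dep.Encode ()   # PUSH version, PUSH guid, GTE, END (0x0D)
+    #   Dep.Decode (Blob)      # recovers an equivalent expression string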
+
+ def Decode (self, Buffer):
+ # initialize
+ self.Depex = Buffer
+ OperandStack = []
+ DepexLen = 0
+
+ while True:
+ Opcode, Operand, OperandSize = self.OpConverter.OpDecode (Buffer[DepexLen:])
+ DepexLen += OperandSize + 1
+
+ if Opcode == 0x0D:
+ break
+
+ elif Opcode == 0x02:
+ if not OperandStack:
+ OperandStack.append ('DECLARE \"{String}\"'.format (String = Operand))
+ else:
+ PrevOperand = OperandStack.pop ()
+ OperandStack.append ('{Operand} DECLARE \"{String}\"'.format (Operand = PrevOperand, String = Operand))
+
+ elif Opcode in [0x00, 0x01]:
+ OperandStack.append (Operand)
+
+ elif Opcode == 0x06:
+ OperandStack.append ('TRUE')
+
+ elif Opcode == 0x07:
+ OperandStack.append ('FALSE')
+
+            elif self.IsValidOperator (self._opReferenceReverse[Opcode]):
+                Operator = self._opReferenceReverse[Opcode]
+                if self.IsValidUnaryOperator (Operator) and len (OperandStack) >= 1:
+                    Operand = OperandStack.pop ()
+                    OperandStack.append (' ( {Operator} {Operand} )'.format (Operator = Operator, Operand = Operand))
+                elif self.IsValidBinocularOperator (Operator) and len (OperandStack) >= 2:
+                    Operand1 = OperandStack.pop ()
+                    Operand2 = OperandStack.pop ()
+                    OperandStack.append (' ( {Operand1} {Operator} {Operand2} )'.format (Operator = Operator, Operand1 = Operand1, Operand2 = Operand2))
+                else:
+                    Msg = 'Not enough operands for opcode {Opcode:02X}.'.format (Opcode = Opcode)
+                    raise ValueError (Msg)
+
+ else:
+ Msg = '{Opcode:02X} is not a valid OpCode.'.format (Opcode = Opcode)
+ raise ValueError (Msg)
+
+ self.DepexExp = OperandStack[0].strip (' ')
+ self.Payload = Buffer[DepexLen:]
+ self._Valid = True
+ self._DepexSize = DepexLen
+ return self.Payload
+
+
+ def DumpInfo (self):
+ DepexLen = 0
+ Opcode = None
+ Buffer = self.Depex
+
+        if self._Valid:
+ print ('EFI_FIRMWARE_IMAGE_DEP.Dependencies = {')
+ while Opcode != 0x0D:
+ Opcode, Operand, OperandSize = self.OpConverter.OpDecode (Buffer[DepexLen:])
+ DepexLen += OperandSize + 1
+ if Operand:
+ print (' {Opcode:02X}, {Operand},'.format (Opcode = Opcode, Operand = Operand))
+ else:
+ print (' {Opcode:02X},'.format (Opcode = Opcode))
+ print ('}')
+
+ print ('sizeof (EFI_FIRMWARE_IMAGE_DEP.Dependencies) = {Size:08X}'.format (Size = self._DepexSize))
+ print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload)))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpAuthHeader.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpAuthHeader.py
new file mode 100755
index 00000000..cd64cc7a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpAuthHeader.py
@@ -0,0 +1,190 @@
+## @file
+# Module that encodes and decodes an EFI_FIRMWARE_IMAGE_AUTHENTICATION with
+# certificate data and payload data.
+#
+# Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+FmpAuthHeader
+'''
+
+import struct
+import uuid
+
+class FmpAuthHeaderClass (object):
+ # ///
+ # /// Image Attribute -Authentication Required
+ # ///
+ # typedef struct {
+ # ///
+ # /// It is included in the signature of AuthInfo. It is used to ensure freshness/no replay.
+ # /// It is incremented during each firmware image operation.
+ # ///
+ # UINT64 MonotonicCount;
+ # ///
+ # /// Provides the authorization for the firmware image operations. It is a signature across
+ # /// the image data and the Monotonic Count value. Caller uses the private key that is
+ # /// associated with a public key that has been provisioned via the key exchange.
+ # /// Because this is defined as a signature, WIN_CERTIFICATE_UEFI_GUID.CertType must
+ # /// be EFI_CERT_TYPE_PKCS7_GUID.
+ # ///
+ # WIN_CERTIFICATE_UEFI_GUID AuthInfo;
+ # } EFI_FIRMWARE_IMAGE_AUTHENTICATION;
+ #
+ # ///
+ # /// Certificate which encapsulates a GUID-specific digital signature
+ # ///
+ # typedef struct {
+ # ///
+ # /// This is the standard WIN_CERTIFICATE header, where
+ # /// wCertificateType is set to WIN_CERT_TYPE_EFI_GUID.
+ # ///
+ # WIN_CERTIFICATE Hdr;
+ # ///
+ # /// This is the unique id which determines the
+  # /// format of the CertData.
+ # ///
+ # EFI_GUID CertType;
+ # ///
+ # /// The following is the certificate data. The format of
+ # /// the data is determined by the CertType.
+ # /// If CertType is EFI_CERT_TYPE_RSA2048_SHA256_GUID,
+ # /// the CertData will be EFI_CERT_BLOCK_RSA_2048_SHA256 structure.
+ # ///
+ # UINT8 CertData[1];
+ # } WIN_CERTIFICATE_UEFI_GUID;
+ #
+ # ///
+ # /// The WIN_CERTIFICATE structure is part of the PE/COFF specification.
+ # ///
+ # typedef struct {
+ # ///
+ # /// The length of the entire certificate,
+ # /// including the length of the header, in bytes.
+ # ///
+ # UINT32 dwLength;
+ # ///
+ # /// The revision level of the WIN_CERTIFICATE
+ # /// structure. The current revision level is 0x0200.
+ # ///
+ # UINT16 wRevision;
+ # ///
+ # /// The certificate type. See WIN_CERT_TYPE_xxx for the UEFI
+ # /// certificate types. The UEFI specification reserves the range of
+ # /// certificate type values from 0x0EF0 to 0x0EFF.
+ # ///
+ # UINT16 wCertificateType;
+ # ///
+ # /// The following is the actual certificate. The format of
+ # /// the certificate depends on wCertificateType.
+ # ///
+ # /// UINT8 bCertificate[ANYSIZE_ARRAY];
+ # ///
+ # } WIN_CERTIFICATE;
+ #
+ # #define WIN_CERT_TYPE_EFI_GUID 0x0EF1
+ #
+ # ///
+ # /// This identifies a signature containing a DER-encoded PKCS #7 version 1.5 [RFC2315]
+ # /// SignedData value.
+ # ///
+ # #define EFI_CERT_TYPE_PKCS7_GUID \
+ # { \
+ # 0x4aafd29d, 0x68df, 0x49ee, {0x8a, 0xa9, 0x34, 0x7d, 0x37, 0x56, 0x65, 0xa7} \
+ # }
+
+ _StructFormat = '<QIHH16s'
+ _StructSize = struct.calcsize (_StructFormat)
+
+ _MonotonicCountFormat = '<Q'
+ _MonotonicCountSize = struct.calcsize (_MonotonicCountFormat)
+
+ _StructAuthInfoFormat = '<IHH16s'
+ _StructAuthInfoSize = struct.calcsize (_StructAuthInfoFormat)
+
+ _WIN_CERT_REVISION = 0x0200
+ _WIN_CERT_TYPE_EFI_GUID = 0x0EF1
+ _EFI_CERT_TYPE_PKCS7_GUID = uuid.UUID ('4aafd29d-68df-49ee-8aa9-347d375665a7')
+
+ def __init__ (self):
+ self._Valid = False
+ self.MonotonicCount = 0
+ self.dwLength = self._StructAuthInfoSize
+ self.wRevision = self._WIN_CERT_REVISION
+ self.wCertificateType = self._WIN_CERT_TYPE_EFI_GUID
+ self.CertType = self._EFI_CERT_TYPE_PKCS7_GUID
+ self.CertData = b''
+ self.Payload = b''
+
+
+ def Encode (self):
+ if self.wRevision != self._WIN_CERT_REVISION:
+ raise ValueError
+ if self.wCertificateType != self._WIN_CERT_TYPE_EFI_GUID:
+ raise ValueError
+ if self.CertType != self._EFI_CERT_TYPE_PKCS7_GUID:
+ raise ValueError
+ self.dwLength = self._StructAuthInfoSize + len (self.CertData)
+
+ FmpAuthHeader = struct.pack (
+ self._StructFormat,
+ self.MonotonicCount,
+ self.dwLength,
+ self.wRevision,
+ self.wCertificateType,
+ self.CertType.bytes_le
+ )
+ self._Valid = True
+
+ return FmpAuthHeader + self.CertData + self.Payload
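+
+    # Usage sketch (hypothetical byte values): CertData is expected to hold a
+    # DER-encoded PKCS7 SignedData blob produced elsewhere, e.g.
+    #   AuthHdr = FmpAuthHeaderClass ()
+    #   AuthHdr.MonotonicCount = 1
+    #   AuthHdr.CertData = Pkcs7Blob        # hypothetical bytes object
+    #   AuthHdr.Payload = FirmwareImage     # hypothetical bytes object
+    #   Image = AuthHdr.Encode ()           # header + CertData + Payload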
+
+ def Decode (self, Buffer):
+ if len (Buffer) < self._StructSize:
+ raise ValueError
+ (MonotonicCount, dwLength, wRevision, wCertificateType, CertType) = \
+ struct.unpack (
+ self._StructFormat,
+ Buffer[0:self._StructSize]
+ )
+ if dwLength < self._StructAuthInfoSize:
+ raise ValueError
+ if wRevision != self._WIN_CERT_REVISION:
+ raise ValueError
+ if wCertificateType != self._WIN_CERT_TYPE_EFI_GUID:
+ raise ValueError
+ if CertType != self._EFI_CERT_TYPE_PKCS7_GUID.bytes_le:
+ raise ValueError
+ self.MonotonicCount = MonotonicCount
+ self.dwLength = dwLength
+ self.wRevision = wRevision
+ self.wCertificateType = wCertificateType
+ self.CertType = uuid.UUID (bytes_le = CertType)
+ self.CertData = Buffer[self._StructSize:self._MonotonicCountSize + self.dwLength]
+ self.Payload = Buffer[self._MonotonicCountSize + self.dwLength:]
+ self._Valid = True
+ return self.Payload
+
+ def IsSigned (self, Buffer):
+ if len (Buffer) < self._StructSize:
+ return False
+ (MonotonicCount, dwLength, wRevision, wCertificateType, CertType) = \
+ struct.unpack (
+ self._StructFormat,
+ Buffer[0:self._StructSize]
+ )
+ if CertType != self._EFI_CERT_TYPE_PKCS7_GUID.bytes_le:
+ return False
+ return True
+
+ def DumpInfo (self):
+ if not self._Valid:
+ raise ValueError
+ print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.MonotonicCount = {MonotonicCount:016X}'.format (MonotonicCount = self.MonotonicCount))
+ print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.Hdr.dwLength = {dwLength:08X}'.format (dwLength = self.dwLength))
+ print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.Hdr.wRevision = {wRevision:04X}'.format (wRevision = self.wRevision))
+ print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.Hdr.wCertificateType = {wCertificateType:04X}'.format (wCertificateType = self.wCertificateType))
+ print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.CertType = {Guid}'.format (Guid = str(self.CertType).upper()))
+ print ('sizeof (EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.CertData) = {Size:08X}'.format (Size = len (self.CertData)))
+ print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload)))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpCapsuleHeader.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpCapsuleHeader.py
new file mode 100755
index 00000000..44bdcbc4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/FmpCapsuleHeader.py
@@ -0,0 +1,310 @@
+## @file
+# Module that encodes and decodes an EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER with
+# a payload.
+#
+# Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+FmpCapsuleHeader
+'''
+
+import struct
+import uuid
+
+class FmpCapsuleImageHeaderClass (object):
+ # typedef struct {
+ # UINT32 Version;
+ #
+ # ///
+ # /// Used to identify device firmware targeted by this update. This guid is matched by
+ # /// system firmware against ImageTypeId field within a EFI_FIRMWARE_IMAGE_DESCRIPTOR
+ # ///
+ # EFI_GUID UpdateImageTypeId;
+ #
+ # ///
+ # /// Passed as ImageIndex in call to EFI_FIRMWARE_MANAGEMENT_PROTOCOL.SetImage ()
+ # ///
+ # UINT8 UpdateImageIndex;
+ # UINT8 reserved_bytes[3];
+ #
+ # ///
+ # /// Size of the binary update image which immediately follows this structure
+ # ///
+ # UINT32 UpdateImageSize;
+ #
+ # ///
+ # /// Size of the VendorCode bytes which optionally immediately follow binary update image in the capsule
+ # ///
+ # UINT32 UpdateVendorCodeSize;
+ #
+ # ///
+ # /// The HardwareInstance to target with this update. If value is zero it means match all
+ # /// HardwareInstances. This field allows update software to target only a single device in
+ # /// cases where there are more than one device with the same ImageTypeId GUID.
+ # /// This header is outside the signed data of the Authentication Info structure and
+ # /// therefore can be modified without changing the Auth data.
+ # ///
+ # UINT64 UpdateHardwareInstance;
+ #
+ # ///
+ # /// Bits which indicate authentication and depex information for the image that follows this structure
+ # ///
+ # UINT64 ImageCapsuleSupport
+ # } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;
+ #
+ # #define EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION 0x00000003
+
+ _StructFormat = '<I16sB3BIIQQ'
+ _StructSize = struct.calcsize (_StructFormat)
+
+ EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION = 0x00000003
+
+ def __init__ (self):
+ self._Valid = False
+ self.Version = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION
+ self.UpdateImageTypeId = uuid.UUID ('00000000-0000-0000-0000-000000000000')
+ self.UpdateImageIndex = 0
+ self.UpdateImageSize = 0
+ self.UpdateVendorCodeSize = 0
+ self.UpdateHardwareInstance = 0x0000000000000000
+ self.ImageCapsuleSupport = 0x0000000000000000
+ self.Payload = b''
+ self.VendorCodeBytes = b''
+
+ def Encode (self):
+ self.UpdateImageSize = len (self.Payload)
+ self.UpdateVendorCodeSize = len (self.VendorCodeBytes)
+ FmpCapsuleImageHeader = struct.pack (
+ self._StructFormat,
+ self.Version,
+ self.UpdateImageTypeId.bytes_le,
+ self.UpdateImageIndex,
+ 0,0,0,
+ self.UpdateImageSize,
+ self.UpdateVendorCodeSize,
+ self.UpdateHardwareInstance,
+ self.ImageCapsuleSupport
+ )
+ self._Valid = True
+ return FmpCapsuleImageHeader + self.Payload + self.VendorCodeBytes
+
+ def Decode (self, Buffer):
+ if len (Buffer) < self._StructSize:
+ raise ValueError
+ (Version, UpdateImageTypeId, UpdateImageIndex, r0, r1, r2, UpdateImageSize, UpdateVendorCodeSize, UpdateHardwareInstance, ImageCapsuleSupport) = \
+ struct.unpack (
+ self._StructFormat,
+ Buffer[0:self._StructSize]
+ )
+
+ if Version < self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION:
+ raise ValueError
+ if UpdateImageIndex < 1:
+ raise ValueError
+ if UpdateImageSize + UpdateVendorCodeSize != len (Buffer[self._StructSize:]):
+ raise ValueError
+
+ self.Version = Version
+ self.UpdateImageTypeId = uuid.UUID (bytes_le = UpdateImageTypeId)
+ self.UpdateImageIndex = UpdateImageIndex
+ self.UpdateImageSize = UpdateImageSize
+ self.UpdateVendorCodeSize = UpdateVendorCodeSize
+ self.UpdateHardwareInstance = UpdateHardwareInstance
+ self.ImageCapsuleSupport = ImageCapsuleSupport
+ self.Payload = Buffer[self._StructSize:self._StructSize + UpdateImageSize]
+ self.VendorCodeBytes = Buffer[self._StructSize + UpdateImageSize:]
+ self._Valid = True
+ return Buffer[self._StructSize:]
+
+ def DumpInfo (self):
+ if not self._Valid:
+ raise ValueError
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.Version = {Version:08X}'.format (Version = self.Version))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageTypeId = {UpdateImageTypeId}'.format (UpdateImageTypeId = str(self.UpdateImageTypeId).upper()))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageIndex = {UpdateImageIndex:08X}'.format (UpdateImageIndex = self.UpdateImageIndex))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageSize = {UpdateImageSize:08X}'.format (UpdateImageSize = self.UpdateImageSize))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateVendorCodeSize = {UpdateVendorCodeSize:08X}'.format (UpdateVendorCodeSize = self.UpdateVendorCodeSize))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateHardwareInstance = {UpdateHardwareInstance:016X}'.format (UpdateHardwareInstance = self.UpdateHardwareInstance))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.ImageCapsuleSupport = {ImageCapsuleSupport:016X}'.format (ImageCapsuleSupport = self.ImageCapsuleSupport))
+ print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload)))
+ print ('sizeof (VendorCodeBytes) = {Size:08X}'.format (Size = len (self.VendorCodeBytes)))
+
+class FmpCapsuleHeaderClass (object):
+ # typedef struct {
+ # UINT32 Version;
+ #
+ # ///
+ # /// The number of drivers included in the capsule and the number of corresponding
+ # /// offsets stored in ItemOffsetList array.
+ # ///
+ # UINT16 EmbeddedDriverCount;
+ #
+ # ///
+ # /// The number of payload items included in the capsule and the number of
+ # /// corresponding offsets stored in the ItemOffsetList array.
+ # ///
+ # UINT16 PayloadItemCount;
+ #
+ # ///
+ # /// Variable length array of dimension [EmbeddedDriverCount + PayloadItemCount]
+ # /// containing offsets of each of the drivers and payload items contained within the capsule
+ # ///
+ # // UINT64 ItemOffsetList[];
+ # } EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER;
+ #
+ # #define EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION 0x00000001
+ _StructFormat = '<IHH'
+ _StructSize = struct.calcsize (_StructFormat)
+
+ _ItemOffsetFormat = '<Q'
+ _ItemOffsetSize = struct.calcsize (_ItemOffsetFormat)
+
+ EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION = 0x00000001
+ CAPSULE_SUPPORT_AUTHENTICATION = 0x0000000000000001
+ CAPSULE_SUPPORT_DEPENDENCY = 0x0000000000000002
+
+ def __init__ (self):
+ self._Valid = False
+ self.Version = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION
+ self.EmbeddedDriverCount = 0
+ self.PayloadItemCount = 0
+ self._ItemOffsetList = []
+ self._EmbeddedDriverList = []
+ self._PayloadList = []
+ self._FmpCapsuleImageHeaderList = []
+
+ def AddEmbeddedDriver (self, EmbeddedDriver):
+ self._EmbeddedDriverList.append (EmbeddedDriver)
+
+ def GetEmbeddedDriver (self, Index):
+        if Index >= len (self._EmbeddedDriverList):
+ raise ValueError
+ return self._EmbeddedDriverList[Index]
+
+ def AddPayload (self, UpdateImageTypeId, Payload = b'', VendorCodeBytes = b'', HardwareInstance = 0, UpdateImageIndex = 1, CapsuleSupport = 0):
+ self._PayloadList.append ((UpdateImageTypeId, Payload, VendorCodeBytes, HardwareInstance, UpdateImageIndex, CapsuleSupport))
+
+ def GetFmpCapsuleImageHeader (self, Index):
+ if Index >= len (self._FmpCapsuleImageHeaderList):
+ raise ValueError
+ return self._FmpCapsuleImageHeaderList[Index]
+
+ def Encode (self):
+ self.EmbeddedDriverCount = len (self._EmbeddedDriverList)
+ self.PayloadItemCount = len (self._PayloadList)
+
+ FmpCapsuleHeader = struct.pack (
+ self._StructFormat,
+ self.Version,
+ self.EmbeddedDriverCount,
+ self.PayloadItemCount
+ )
+
+ FmpCapsuleData = b''
+ Offset = self._StructSize + (self.EmbeddedDriverCount + self.PayloadItemCount) * self._ItemOffsetSize
+ for EmbeddedDriver in self._EmbeddedDriverList:
+ FmpCapsuleData = FmpCapsuleData + EmbeddedDriver
+ self._ItemOffsetList.append (Offset)
+ Offset = Offset + len (EmbeddedDriver)
+ Index = 1
+ for (UpdateImageTypeId, Payload, VendorCodeBytes, HardwareInstance, UpdateImageIndex, CapsuleSupport) in self._PayloadList:
+ FmpCapsuleImageHeader = FmpCapsuleImageHeaderClass ()
+ FmpCapsuleImageHeader.UpdateImageTypeId = UpdateImageTypeId
+ FmpCapsuleImageHeader.UpdateImageIndex = UpdateImageIndex
+ FmpCapsuleImageHeader.Payload = Payload
+ FmpCapsuleImageHeader.VendorCodeBytes = VendorCodeBytes
+ FmpCapsuleImageHeader.UpdateHardwareInstance = HardwareInstance
+ FmpCapsuleImageHeader.ImageCapsuleSupport = CapsuleSupport
+ FmpCapsuleImage = FmpCapsuleImageHeader.Encode ()
+ FmpCapsuleData = FmpCapsuleData + FmpCapsuleImage
+
+ self._ItemOffsetList.append (Offset)
+ self._FmpCapsuleImageHeaderList.append (FmpCapsuleImageHeader)
+
+ Offset = Offset + len (FmpCapsuleImage)
+ Index = Index + 1
+
+ for Offset in self._ItemOffsetList:
+ FmpCapsuleHeader = FmpCapsuleHeader + struct.pack (self._ItemOffsetFormat, Offset)
+
+ self._Valid = True
+ return FmpCapsuleHeader + FmpCapsuleData
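+
+    # Usage sketch (hypothetical GUID and payload bytes): payloads are
+    # registered via AddPayload () before Encode () lays out the offset list, e.g.
+    #   FmpHdr = FmpCapsuleHeaderClass ()
+    #   FmpHdr.AddPayload (uuid.UUID ('00112233-4455-6677-8899-aabbccddeeff'), Payload = ImageBytes)
+    #   Blob = FmpHdr.Encode ()   # header + ItemOffsetList + image header + payload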
+
+ def Decode (self, Buffer):
+ if len (Buffer) < self._StructSize:
+ raise ValueError
+ (Version, EmbeddedDriverCount, PayloadItemCount) = \
+ struct.unpack (
+ self._StructFormat,
+ Buffer[0:self._StructSize]
+ )
+ if Version < self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION:
+ raise ValueError
+
+ self.Version = Version
+ self.EmbeddedDriverCount = EmbeddedDriverCount
+ self.PayloadItemCount = PayloadItemCount
+ self._ItemOffsetList = []
+ self._EmbeddedDriverList = []
+ self._PayloadList = []
+ self._FmpCapsuleImageHeaderList = []
+
+ #
+ # Parse the ItemOffsetList values
+ #
+ Offset = self._StructSize
+ for Index in range (0, EmbeddedDriverCount + PayloadItemCount):
+ ItemOffset = struct.unpack (self._ItemOffsetFormat, Buffer[Offset:Offset + self._ItemOffsetSize])[0]
+ if ItemOffset >= len (Buffer):
+ raise ValueError
+ self._ItemOffsetList.append (ItemOffset)
+ Offset = Offset + self._ItemOffsetSize
+ Result = Buffer[Offset:]
+
+ #
+ # Parse the EmbeddedDrivers
+ #
+ for Index in range (0, EmbeddedDriverCount):
+ Offset = self._ItemOffsetList[Index]
+ if Index < (len (self._ItemOffsetList) - 1):
+ Length = self._ItemOffsetList[Index + 1] - Offset
+ else:
+ Length = len (Buffer) - Offset
+ self.AddEmbeddedDriver (Buffer[Offset:Offset + Length])
+
+ #
+ # Parse the Payloads that are FMP Capsule Images
+ #
+ for Index in range (EmbeddedDriverCount, EmbeddedDriverCount + PayloadItemCount):
+ Offset = self._ItemOffsetList[Index]
+ if Index < (len (self._ItemOffsetList) - 1):
+ Length = self._ItemOffsetList[Index + 1] - Offset
+ else:
+ Length = len (Buffer) - Offset
+ FmpCapsuleImageHeader = FmpCapsuleImageHeaderClass ()
+ FmpCapsuleImageHeader.Decode (Buffer[Offset:Offset + Length])
+ self.AddPayload (
+ FmpCapsuleImageHeader.UpdateImageTypeId,
+ FmpCapsuleImageHeader.Payload,
+ FmpCapsuleImageHeader.VendorCodeBytes
+ )
+ self._FmpCapsuleImageHeaderList.append (FmpCapsuleImageHeader)
+
+ self._Valid = True
+ return Result
+
+ def DumpInfo (self):
+ if not self._Valid:
+ raise ValueError
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.Version = {Version:08X}'.format (Version = self.Version))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.EmbeddedDriverCount = {EmbeddedDriverCount:08X}'.format (EmbeddedDriverCount = self.EmbeddedDriverCount))
+ for EmbeddedDriver in self._EmbeddedDriverList:
+ print (' sizeof (EmbeddedDriver) = {Size:08X}'.format (Size = len (EmbeddedDriver)))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.PayloadItemCount = {PayloadItemCount:08X}'.format (PayloadItemCount = self.PayloadItemCount))
+ print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.ItemOffsetList = ')
+ for Offset in self._ItemOffsetList:
+ print (' {Offset:016X}'.format (Offset = Offset))
+ for FmpCapsuleImageHeader in self._FmpCapsuleImageHeaderList:
+ FmpCapsuleImageHeader.DumpInfo ()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py
new file mode 100755
index 00000000..109c1e49
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py
@@ -0,0 +1,130 @@
+## @file
+# Module that encodes and decodes an EFI_CAPSULE_HEADER with a payload
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+UefiCapsuleHeader
+'''
+
+import struct
+import uuid
+
+class UefiCapsuleHeaderClass (object):
+ # typedef struct {
+ # ///
+ # /// A GUID that defines the contents of a capsule.
+ # ///
+ # EFI_GUID CapsuleGuid;
+ # ///
+ # /// The size of the capsule header. This may be larger than the size of
+ # /// the EFI_CAPSULE_HEADER since CapsuleGuid may imply
+ # /// extended header entries
+ # ///
+ # UINT32 HeaderSize;
+ # ///
+ # /// Bit-mapped list describing the capsule attributes. The Flag values
+ # /// of 0x0000 - 0xFFFF are defined by CapsuleGuid. Flag values
+ # /// of 0x10000 - 0xFFFFFFFF are defined by this specification
+ # ///
+ # UINT32 Flags;
+ # ///
+ # /// Size in bytes of the capsule.
+ # ///
+ # UINT32 CapsuleImageSize;
+ # } EFI_CAPSULE_HEADER;
+ #
+ # #define CAPSULE_FLAGS_PERSIST_ACROSS_RESET 0x00010000
+ # #define CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE 0x00020000
+ # #define CAPSULE_FLAGS_INITIATE_RESET 0x00040000
+ #
+ _StructFormat = '<16sIIII'
+ _StructSize = struct.calcsize (_StructFormat)
+
+ EFI_FIRMWARE_MANAGEMENT_CAPSULE_ID_GUID = uuid.UUID ('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')
+
+ _CAPSULE_FLAGS_PERSIST_ACROSS_RESET = 0x00010000
+ _CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE = 0x00020000
+ _CAPSULE_FLAGS_INITIATE_RESET = 0x00040000
+
+ def __init__ (self):
+ self._Valid = False
+ self.CapsuleGuid = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_ID_GUID
+ self.HeaderSize = self._StructSize
+ self.OemFlags = 0x0000
+ self.PersistAcrossReset = False
+ self.PopulateSystemTable = False
+ self.InitiateReset = False
+ self.CapsuleImageSize = self.HeaderSize
+ self.Payload = b''
+
+ def Encode (self):
+ Flags = self.OemFlags
+ if self.PersistAcrossReset:
+ Flags = Flags | self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET
+ if self.PopulateSystemTable:
+ Flags = Flags | self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE
+ if self.InitiateReset:
+ Flags = Flags | self._CAPSULE_FLAGS_INITIATE_RESET
+
+ self.CapsuleImageSize = self.HeaderSize + len (self.Payload)
+
+ UefiCapsuleHeader = struct.pack (
+ self._StructFormat,
+ self.CapsuleGuid.bytes_le,
+ self.HeaderSize,
+ Flags,
+ self.CapsuleImageSize,
+ 0
+ )
+ self._Valid = True
+ return UefiCapsuleHeader + self.Payload
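+
+    # Usage sketch: the outer EFI_CAPSULE_HEADER usually wraps an encoded FMP
+    # capsule (see FmpCapsuleHeader.py), e.g.
+    #   CapHdr = UefiCapsuleHeaderClass ()
+    #   CapHdr.PersistAcrossReset = True
+    #   CapHdr.InitiateReset = True
+    #   CapHdr.Payload = FmpCapsuleBlob   # hypothetical bytes from FmpCapsuleHeaderClass.Encode ()
+    #   Image = CapHdr.Encode ()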
+
+ def Decode (self, Buffer):
+ if len (Buffer) < self._StructSize:
+ raise ValueError
+ (CapsuleGuid, HeaderSize, Flags, CapsuleImageSize, Reserved) = \
+ struct.unpack (
+ self._StructFormat,
+ Buffer[0:self._StructSize]
+ )
+ if HeaderSize < self._StructSize:
+ raise ValueError
+ if CapsuleImageSize != len (Buffer):
+ raise ValueError
+ self.CapsuleGuid = uuid.UUID (bytes_le = CapsuleGuid)
+ self.HeaderSize = HeaderSize
+ self.OemFlags = Flags & 0xffff
+ self.PersistAcrossReset = (Flags & self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET) != 0
+ self.PopulateSystemTable = (Flags & self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE) != 0
+ self.InitiateReset = (Flags & self._CAPSULE_FLAGS_INITIATE_RESET) != 0
+ self.CapsuleImageSize = CapsuleImageSize
+ self.Payload = Buffer[self.HeaderSize:]
+
+ self._Valid = True
+ return self.Payload
+
+ def DumpInfo (self):
+ if not self._Valid:
+ raise ValueError
+ Flags = self.OemFlags
+ if self.PersistAcrossReset:
+ Flags = Flags | self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET
+ if self.PopulateSystemTable:
+ Flags = Flags | self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE
+ if self.InitiateReset:
+ Flags = Flags | self._CAPSULE_FLAGS_INITIATE_RESET
+ print ('EFI_CAPSULE_HEADER.CapsuleGuid = {Guid}'.format (Guid = str(self.CapsuleGuid).upper()))
+ print ('EFI_CAPSULE_HEADER.HeaderSize = {Size:08X}'.format (Size = self.HeaderSize))
+ print ('EFI_CAPSULE_HEADER.Flags = {Flags:08X}'.format (Flags = Flags))
+ print (' OEM Flags = {Flags:04X}'.format (Flags = self.OemFlags))
+ if self.PersistAcrossReset:
+ print (' CAPSULE_FLAGS_PERSIST_ACROSS_RESET')
+ if self.PopulateSystemTable:
+ print (' CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE')
+ if self.InitiateReset:
+ print (' CAPSULE_FLAGS_INITIATE_RESET')
+ print ('EFI_CAPSULE_HEADER.CapsuleImageSize = {Size:08X}'.format (Size = self.CapsuleImageSize))
+ print ('sizeof (Payload) = {Size:08X}'.format (Size = len (self.Payload)))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/__init__.py
new file mode 100644
index 00000000..f9a73858
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/Capsule/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Common.Uefi.Capsule' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/__init__.py
new file mode 100644
index 00000000..47322db6
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/Uefi/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Common.Uefi' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VariableAttributes.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VariableAttributes.py
new file mode 100755
index 00000000..1b451a89
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VariableAttributes.py
@@ -0,0 +1,51 @@
+## @file
+#
+# This file is used to handle the variable attributes and property information
+#
+#
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+class VariableAttributes(object):
+ EFI_VARIABLE_NON_VOLATILE = 0x00000001
+ EFI_VARIABLE_BOOTSERVICE_ACCESS = 0x00000002
+ EFI_VARIABLE_RUNTIME_ACCESS = 0x00000004
+ VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY = 0x00000001
+ VarAttributesMap = {
+ "NV":EFI_VARIABLE_NON_VOLATILE,
+ "BS":EFI_VARIABLE_BOOTSERVICE_ACCESS,
+ "RT":EFI_VARIABLE_RUNTIME_ACCESS,
+ "RO":VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
+ }
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def GetVarAttributes(var_attr_str):
+ VarAttr = 0x00000000
+ VarProp = 0x00000000
+
+ attr_list = var_attr_str.split(",")
+ for attr in attr_list:
+ attr = attr.strip()
+ if attr == 'RO':
+ VarProp = VariableAttributes.VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
+ else:
+ VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
+ return VarAttr, VarProp
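+
+    # Example: GetVarAttributes("NV,BS,RT,RO") returns (0x00000007, 0x00000001),
+    # i.e. NV | BS | RT as the attribute mask and READ_ONLY as the property value.
+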
+ @staticmethod
+ def ValidateVarAttributes(var_attr_str):
+ if not var_attr_str:
+ return True, ""
+ attr_list = var_attr_str.split(",")
+ attr_temp = []
+ for attr in attr_list:
+ attr = attr.strip()
+ attr_temp.append(attr)
+            if attr not in VariableAttributes.VarAttributesMap:
+                return False, "The variable attribute %s is not supported in a DSC file. Supported variable attributes are ['BS', 'NV', 'RT', 'RO']." % attr
+        if 'RT' in attr_temp and 'BS' not in attr_temp:
+            return False, "The RT attribute requires the BS attribute to be present."
+ return True, ""
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VpdInfoFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VpdInfoFile.py
new file mode 100755
index 00000000..ac099a14
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VpdInfoFile.py
@@ -0,0 +1,255 @@
+## @file
+#
+# This package manages the VPD PCD information file which will be generated
+# by the build tool's autogen.
+# The VPD PCD information file will be the input for the third-party BPDG tool
+# which is pointed to by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
+#
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os
+import re
+import Common.EdkLogger as EdkLogger
+import Common.BuildToolError as BuildToolError
+import subprocess
+import Common.GlobalData as GlobalData
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.Misc import SaveFileOnChange
+from Common.DataType import *
+
+FILE_COMMENT_TEMPLATE = \
+"""
+## @file
+#
+# THIS FILE IS AUTO-GENERATED BY THE BUILD TOOLS; PLEASE DO NOT MODIFY IT.
+#
+# This file lists all VPD information for a platform, collected by build.exe.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+"""
+
+## The class manage VpdInfoFile.
+#
+# This file contains an ordered (based on position in the DSC file) list of the
+# PCDs specified in the platform description file (DSC). The Value field that
+# will be assigned to the PCD comes from the DSC file, INF file (if not defined
+# in the DSC file) or the DEC file (if not defined in the INF file). This file
+# is used as an input to the BPDG tool.
+# Format for this file (using EBNF notation) is:
+# <File> :: = [<CommentBlock>]
+# [<PcdEntry>]*
+# <CommentBlock> ::= ["#" <String> <EOL>]*
+# <PcdEntry> ::= <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>
+# <PcdName> ::= <TokenSpaceCName> "." <PcdCName>
+# <TokenSpaceCName> ::= C Variable Name of the Token Space GUID
+# <PcdCName> ::= C Variable Name of the PCD
+# <Offset> ::= {TAB_STAR} {<HexNumber>}
+# <HexNumber> ::= "0x" (a-fA-F0-9){1,8}
+# <Size> ::= <HexNumber>
+# <Value> ::= {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}
+# <NonNegativeInt> ::= (0-9)+
+# <QString> ::= ["L"] <DblQuote> <String> <DblQuote>
+# <DblQuote> ::= 0x22
+# <Array> ::= {<CArray>} {<NList>}
+# <CArray> ::= "{" <HexNumber> ["," <HexNumber>]* "}"
+# <NList> ::= <HexNumber> ["," <HexNumber>]*
+#
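+# For illustration, a hypothetical entry following this grammar could look like:
+#   gEfiMdeModulePkgTokenSpaceGuid.PcdVpdExample | 0x00000010 | 0x4 | 0x12345678
+#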
+class VpdInfoFile:
+
+ _rVpdPcdLine = None
+ ## Constructor
+ def __init__(self):
+ ## Dictionary for VPD in following format
+ #
+ # Key : PcdClassObject instance.
+ # @see BuildClassObject.PcdClassObject
+ # Value : offset in different SKU such as [sku1_offset, sku2_offset]
+ self._VpdArray = {}
+ self._VpdInfo = {}
+
+    ## Add a VPD PCD collected from the platform's autogen when building.
+    #
+    #  @param Vpd      The VPD PCD collected for a platform.
+    #                  @see BuildClassObject.PcdClassObject
+    #  @param skuname  The SKU name that the offset applies to.
+    #  @param Offset   Integer value for the VPD's offset in the specific SKU.
+    #
+ def Add(self, Vpd, skuname, Offset):
+ if (Vpd is None):
+ EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
+
+ if not (Offset >= "0" or Offset == TAB_STAR):
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
+
+ if Vpd.DatumType == TAB_VOID:
+ if Vpd.MaxDatumSize <= "0":
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
+ elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ if not Vpd.MaxDatumSize:
+ Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
+ else:
+ if Vpd.MaxDatumSize <= "0":
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
+
+ if Vpd not in self._VpdArray:
+ #
+            # If there is no Vpd instance in the dict, this offset for the given SKU is a new one
+ #
+ self._VpdArray[Vpd] = {}
+
+ self._VpdArray[Vpd].update({skuname:Offset})
+
+
+    ## Generate VPD PCD information into a text file.
+    #
+    #  If parameter FilePath is invalid, an error is reported.
+    #
+    #  @param FilePath        The given file path which will hold the VPD information
+ def Write(self, FilePath):
+        # Report an error for a missing or empty file path
+        if not FilePath:
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid parameter FilePath: %s." % FilePath)
+
+ Content = FILE_COMMENT_TEMPLATE
+ Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)
+ for Pcd in Pcds:
+ i = 0
+ PcdTokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ PcdTokenCName = PcdItem[0]
+ for skuname in self._VpdArray[Pcd]:
+ PcdValue = str(Pcd.SkuInfoList[skuname].DefaultValue).strip()
+ if PcdValue == "" :
+ PcdValue = Pcd.DefaultValue
+
+ Content += "%s.%s|%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, PcdTokenCName, skuname, str(self._VpdArray[Pcd][skuname]).strip(), str(Pcd.MaxDatumSize).strip(), PcdValue)
+ i += 1
+
+ return SaveFileOnChange(FilePath, Content, False)
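+
+    # Illustrative output line produced by Write() (names and values hypothetical):
+    #   gTokenSpaceGuid.PcdVpdExample|DEFAULT|0x100|0x4|0x12345678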
+
+    ## Read an existing VPD PCD info file.
+    #
+    #  This routine will read VPD PCD information from an existing file and
+    #  construct the internal PcdClassObject array.
+    #  This routine could be used by a third-party tool to parse the VPD info file content.
+    #
+    #  @param FilePath    The full path string for the existing VPD PCD info file.
+ def Read(self, FilePath):
+ try:
+ fd = open(FilePath, "r")
+        except:
+            EdkLogger.error("VpdInfoFile",
+                            BuildToolError.FILE_OPEN_FAILURE,
+                            "Failed to open file %s for reading." % FilePath)
+ Lines = fd.readlines()
+ for Line in Lines:
+ Line = Line.strip()
+ if len(Line) == 0 or Line.startswith("#"):
+ continue
+
+ #
+ # the line must follow output format defined in BPDG spec.
+ #
+ try:
+ PcdName, SkuId, Offset, Size, Value = Line.split("#")[0].split("|")
+ PcdName, SkuId, Offset, Size, Value = PcdName.strip(), SkuId.strip(), Offset.strip(), Size.strip(), Value.strip()
+ TokenSpaceName, PcdTokenName = PcdName.split(".")
+ except:
+ EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
+
+ Found = False
+
+ if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
+ self._VpdInfo[(TokenSpaceName, PcdTokenName)] = {}
+ self._VpdInfo[(TokenSpaceName, PcdTokenName)][(SkuId, Offset)] = Value
+ for VpdObject in self._VpdArray:
+ VpdObjectTokenCName = VpdObject.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (VpdObject.TokenCName, VpdObject.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ VpdObjectTokenCName = PcdItem[0]
+ for sku in VpdObject.SkuInfoList:
+ if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObjectTokenCName == PcdTokenName.strip() and sku == SkuId:
+ if self._VpdArray[VpdObject][sku] == TAB_STAR:
+ if Offset == TAB_STAR:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
+ self._VpdArray[VpdObject][sku] = Offset
+ Found = True
+ if not Found:
+ EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
+
+ ## Get count of VPD PCD collected from platform's autogen when building.
+ #
+ # @return The integer count value
+ def GetCount(self):
+ Count = 0
+ for OffsetList in self._VpdArray.values():
+ Count += len(OffsetList)
+
+ return Count
+
+    ## Get the offset value for a given VPD PCD
+    #
+    #  Because BPDG only supports one SKU, only the offset for the default SKU is returned.
+    #
+    #  @param vpd    A given VPD PCD
+ def GetOffset(self, vpd):
+ if vpd not in self._VpdArray:
+ return None
+
+ if len(self._VpdArray[vpd]) == 0:
+ return None
+
+        return self._VpdArray[vpd]
+
+    def GetVpdInfo(self, arg):
+ (PcdTokenName, TokenSpaceName) = arg
+ return [(sku,offset,value) for (sku,offset),value in self._VpdInfo.get((TokenSpaceName, PcdTokenName)).items()]
+
+## Call external BPDG tool to process VPD file
+#
+# @param ToolPath The string path name for BPDG tool
+# @param VpdFileName The string path name for VPD information guid.txt
+#
+def CallExtenalBPDGTool(ToolPath, VpdFileName):
+ assert ToolPath is not None, "Invalid parameter ToolPath"
+ assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
+
+ OutputDir = os.path.dirname(VpdFileName)
+ FileName = os.path.basename(VpdFileName)
+ BaseName, ext = os.path.splitext(FileName)
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
+
+ try:
+ PopenObject = subprocess.Popen(' '.join([ToolPath,
+ '-o', OutputBinFileName,
+ '-m', OutputMapFileName,
+ '-q',
+ '-f',
+ VpdFileName]),
+ stdout=subprocess.PIPE,
+ stderr= subprocess.PIPE,
+ shell=True)
+ except Exception as X:
+ EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
+ (out, error) = PopenObject.communicate()
+ print(out.decode())
+    while PopenObject.returncode is None:
+ PopenObject.wait()
+
+ if PopenObject.returncode != 0:
+ EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
+ EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
+ (PopenObject.returncode, str(error)))
+
+ return PopenObject.returncode
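+
+# Illustrative call sketch (tool path and VPD file name are hypothetical):
+#
+#   rc = CallExtenalBPDGTool("BPDG", r"Build\FV\VpdInfo.txt")
+#   # rc is the BPDG process exit code; 0 indicates success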
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/__init__.py
new file mode 100644
index 00000000..b28ac448
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Common' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/caching.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/caching.py
new file mode 100755
index 00000000..681740c1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/caching.py
@@ -0,0 +1,41 @@
+## @file
+# help with caching in BaseTools
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+
+# for class function
+class cached_class_function(object):
+ def __init__(self, function):
+ self._function = function
+ def __get__(self, obj, cls):
+ def CallMeHere(*args,**kwargs):
+ Value = self._function(obj, *args,**kwargs)
+ obj.__dict__[self._function.__name__] = lambda *args,**kwargs:Value
+ return Value
+ return CallMeHere
+
+# for class property
+class cached_property(object):
+ def __init__(self, function):
+ self._function = function
+ def __get__(self, obj, cls):
+ Value = obj.__dict__[self._function.__name__] = self._function(obj)
+ return Value
+
+# for non-class function
+class cached_basic_function(object):
+ def __init__(self, function):
+ self._function = function
+ # wrapper to call _do since <class>.__dict__ doesn't support changing __call__
+ def __call__(self,*args,**kwargs):
+ return self._do(*args,**kwargs)
+    def _do(self,*args,**kwargs):
+        Value = self._function(*args,**kwargs)
+        # cache the result; the replacement is looked up on the instance, so it
+        # must not expect an implicit self argument
+        self.__dict__['_do'] = lambda *args,**kwargs:Value
+        return Value
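+
+# Illustrative usage sketch (class and function names are hypothetical):
+#
+#   class ToolChain(object):
+#       @cached_property
+#       def Flags(self):
+#           return ComputeFlags()     # runs once; later reads hit __dict__
+#
+#       @cached_class_function
+#       def GetDefines(self):
+#           return ParseDefines()     # first call replaces itself with the result
+#
+#   @cached_basic_function
+#   def GetWorkspace():
+#       return os.environ['WORKSPACE']  # memoized after the first call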
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/CommonClass.py
new file mode 100755
index 00000000..47f58145
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/CommonClass.py
@@ -0,0 +1,91 @@
+## @file
+# This file is used to define common items of class object
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+
+## SkuInfoClass
+#
+# This class defines the SkuInfo item used in Module/Platform/Package files
+#
+# @param object: Inherited from object class
+# @param SkuIdName: Input value for SkuIdName, default is ''
+# @param SkuId: Input value for SkuId, default is ''
+# @param VariableName: Input value for VariableName, default is ''
+# @param VariableGuid: Input value for VariableGuid, default is ''
+# @param VariableOffset: Input value for VariableOffset, default is ''
+# @param HiiDefaultValue: Input value for HiiDefaultValue, default is ''
+# @param VpdOffset: Input value for VpdOffset, default is ''
+# @param DefaultValue: Input value for DefaultValue, default is ''
+#
+# @var SkuIdName: To store value for SkuIdName
+# @var SkuId: To store value for SkuId
+# @var VariableName: To store value for VariableName
+# @var VariableGuid: To store value for VariableGuid
+# @var VariableOffset: To store value for VariableOffset
+# @var HiiDefaultValue: To store value for HiiDefaultValue
+# @var VpdOffset: To store value for VpdOffset
+# @var DefaultValue: To store value for DefaultValue
+#
+class SkuInfoClass(object):
+ def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
+ HiiDefaultValue = '', VpdOffset = '', DefaultValue = '', VariableGuidValue = '', VariableAttribute = '', DefaultStore = None):
+ self.SkuIdName = SkuIdName
+ self.SkuId = SkuId
+
+ #
+ # Used by Hii
+ #
+ if DefaultStore is None:
+ DefaultStore = {}
+ self.VariableName = VariableName
+ self.VariableGuid = VariableGuid
+ self.VariableGuidValue = VariableGuidValue
+ self.VariableOffset = VariableOffset
+ self.HiiDefaultValue = HiiDefaultValue
+ self.VariableAttribute = VariableAttribute
+ self.DefaultStoreDict = DefaultStore
+
+ #
+ # Used by Vpd
+ #
+ self.VpdOffset = VpdOffset
+
+ #
+ # Used by Default
+ #
+ self.DefaultValue = DefaultValue
+
+ ## Convert the class to a string
+ #
+ # Convert each member of the class to string
+ # Organize to a single line format string
+ #
+ # @retval Rtn Formatted String
+ #
+ def __str__(self):
+ Rtn = 'SkuId = ' + str(self.SkuId) + "," + \
+ 'SkuIdName = ' + str(self.SkuIdName) + "," + \
+ 'VariableName = ' + str(self.VariableName) + "," + \
+ 'VariableGuid = ' + str(self.VariableGuid) + "," + \
+ 'VariableOffset = ' + str(self.VariableOffset) + "," + \
+ 'HiiDefaultValue = ' + str(self.HiiDefaultValue) + "," + \
+ 'VpdOffset = ' + str(self.VpdOffset) + "," + \
+ 'DefaultValue = ' + str(self.DefaultValue) + ","
+ return Rtn
+
+ def __deepcopy__(self,memo):
+ new_sku = SkuInfoClass()
+ new_sku.SkuIdName = self.SkuIdName
+ new_sku.SkuId = self.SkuId
+ new_sku.VariableName = self.VariableName
+ new_sku.VariableGuid = self.VariableGuid
+ new_sku.VariableGuidValue = self.VariableGuidValue
+ new_sku.VariableOffset = self.VariableOffset
+ new_sku.HiiDefaultValue = self.HiiDefaultValue
+ new_sku.VariableAttribute = self.VariableAttribute
+ new_sku.DefaultStoreDict = {key:value for key,value in self.DefaultStoreDict.items()}
+ new_sku.VpdOffset = self.VpdOffset
+ new_sku.DefaultValue = self.DefaultValue
+ return new_sku
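+
+# Illustrative construction (all values hypothetical):
+#
+#   Sku = SkuInfoClass(SkuIdName = 'DEFAULT', SkuId = '0',
+#                      VariableName = 'Setup', VariableOffset = '0x0',
+#                      HiiDefaultValue = '0x1', DefaultValue = '0x1')
+#   print(Sku)   # single-line summary via __str__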
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/DataClass.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/DataClass.py
new file mode 100644
index 00000000..b3b6337d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/DataClass.py
@@ -0,0 +1,369 @@
+## @file
+# This file is used to define classes for the data structures used in ECC
+#
+# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+
+##
+# Static values for data models
+#
+MODEL_UNKNOWN = 0
+
+MODEL_FILE_C = 1001
+MODEL_FILE_H = 1002
+MODEL_FILE_ASM = 1003
+MODEL_FILE_INF = 1011
+MODEL_FILE_DEC = 1012
+MODEL_FILE_DSC = 1013
+MODEL_FILE_FDF = 1014
+MODEL_FILE_INC = 1015
+MODEL_FILE_CIF = 1016
+MODEL_FILE_UNI = 1017
+MODEL_FILE_OTHERS = 1099
+
+MODEL_IDENTIFIER_FILE_HEADER = 2001
+MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
+MODEL_IDENTIFIER_COMMENT = 2003
+MODEL_IDENTIFIER_PARAMETER = 2004
+MODEL_IDENTIFIER_STRUCTURE = 2005
+MODEL_IDENTIFIER_VARIABLE = 2006
+MODEL_IDENTIFIER_INCLUDE = 2007
+MODEL_IDENTIFIER_PREDICATE_EXPRESSION = 2008
+MODEL_IDENTIFIER_ENUMERATE = 2009
+MODEL_IDENTIFIER_PCD = 2010
+MODEL_IDENTIFIER_UNION = 2011
+MODEL_IDENTIFIER_MACRO_IFDEF = 2012
+MODEL_IDENTIFIER_MACRO_IFNDEF = 2013
+MODEL_IDENTIFIER_MACRO_DEFINE = 2014
+MODEL_IDENTIFIER_MACRO_ENDIF = 2015
+MODEL_IDENTIFIER_MACRO_PROGMA = 2016
+MODEL_IDENTIFIER_FUNCTION_CALLING = 2018
+MODEL_IDENTIFIER_TYPEDEF = 2017
+MODEL_IDENTIFIER_FUNCTION_DECLARATION = 2019
+MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION = 2020
+
+MODEL_EFI_PROTOCOL = 3001
+MODEL_EFI_PPI = 3002
+MODEL_EFI_GUID = 3003
+MODEL_EFI_LIBRARY_CLASS = 3004
+MODEL_EFI_LIBRARY_INSTANCE = 3005
+MODEL_EFI_PCD = 3006
+MODEL_EFI_SOURCE_FILE = 3007
+MODEL_EFI_BINARY_FILE = 3008
+MODEL_EFI_SKU_ID = 3009
+MODEL_EFI_INCLUDE = 3010
+MODEL_EFI_DEPEX = 3011
+MODEL_EFI_DEFAULT_STORES = 3012
+
+MODEL_PCD = 4000
+MODEL_PCD_FIXED_AT_BUILD = 4001
+MODEL_PCD_PATCHABLE_IN_MODULE = 4002
+MODEL_PCD_FEATURE_FLAG = 4003
+MODEL_PCD_DYNAMIC_EX = 4004
+MODEL_PCD_DYNAMIC_EX_DEFAULT = 4005
+MODEL_PCD_DYNAMIC_EX_VPD = 4006
+MODEL_PCD_DYNAMIC_EX_HII = 4007
+MODEL_PCD_DYNAMIC = 4008
+MODEL_PCD_DYNAMIC_DEFAULT = 4009
+MODEL_PCD_DYNAMIC_VPD = 4010
+MODEL_PCD_DYNAMIC_HII = 4011
+MODEL_PCD_TYPE_LIST = [MODEL_PCD_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC_DEFAULT,
+ MODEL_PCD_DYNAMIC_HII,
+ MODEL_PCD_DYNAMIC_VPD,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT,
+ MODEL_PCD_DYNAMIC_EX_HII,
+ MODEL_PCD_DYNAMIC_EX_VPD
+ ]
+
+MODEL_META_DATA_HEADER_COMMENT = 5000
+MODEL_META_DATA_HEADER = 5001
+MODEL_META_DATA_INCLUDE = 5002
+MODEL_META_DATA_DEFINE = 5003
+MODEL_META_DATA_CONDITIONAL_STATEMENT_IF = 5004
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE = 5005
+MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF = 5006
+MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF = 5007
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR = 5400
+MODEL_META_DATA_BUILD_OPTION = 5008
+MODEL_META_DATA_COMPONENT = 5009
+MODEL_META_DATA_USER_EXTENSION = 5010
+MODEL_META_DATA_PACKAGE = 5011
+MODEL_META_DATA_NMAKE = 5012
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 5013
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
+MODEL_META_DATA_COMMENT = 5016
+MODEL_META_DATA_GLOBAL_DEFINE = 5017
+MODEL_META_DATA_SECTION_HEADER = 5100
+MODEL_META_DATA_SUBSECTION_HEADER = 5200
+MODEL_META_DATA_TAIL_COMMENT = 5300
+
+MODEL_EXTERNAL_DEPENDENCY = 10000
+
+MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
+ ('MODEL_FILE_C', MODEL_FILE_C),
+ ('MODEL_FILE_H', MODEL_FILE_H),
+ ('MODEL_FILE_ASM', MODEL_FILE_ASM),
+ ('MODEL_FILE_INF', MODEL_FILE_INF),
+ ('MODEL_FILE_DEC', MODEL_FILE_DEC),
+ ('MODEL_FILE_DSC', MODEL_FILE_DSC),
+ ('MODEL_FILE_FDF', MODEL_FILE_FDF),
+ ('MODEL_FILE_INC', MODEL_FILE_INC),
+ ('MODEL_FILE_CIF', MODEL_FILE_CIF),
+ ('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
+ ('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
+ ('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
+ ('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
+ ('MODEL_IDENTIFIER_PARAMETER', MODEL_IDENTIFIER_PARAMETER),
+ ('MODEL_IDENTIFIER_STRUCTURE', MODEL_IDENTIFIER_STRUCTURE),
+ ('MODEL_IDENTIFIER_VARIABLE', MODEL_IDENTIFIER_VARIABLE),
+ ('MODEL_IDENTIFIER_INCLUDE', MODEL_IDENTIFIER_INCLUDE),
+ ('MODEL_IDENTIFIER_PREDICATE_EXPRESSION', MODEL_IDENTIFIER_PREDICATE_EXPRESSION),
+ ('MODEL_IDENTIFIER_ENUMERATE', MODEL_IDENTIFIER_ENUMERATE),
+ ('MODEL_IDENTIFIER_PCD', MODEL_IDENTIFIER_PCD),
+ ('MODEL_IDENTIFIER_UNION', MODEL_IDENTIFIER_UNION),
+ ('MODEL_IDENTIFIER_MACRO_IFDEF', MODEL_IDENTIFIER_MACRO_IFDEF),
+ ('MODEL_IDENTIFIER_MACRO_IFNDEF', MODEL_IDENTIFIER_MACRO_IFNDEF),
+ ('MODEL_IDENTIFIER_MACRO_DEFINE', MODEL_IDENTIFIER_MACRO_DEFINE),
+ ('MODEL_IDENTIFIER_MACRO_ENDIF', MODEL_IDENTIFIER_MACRO_ENDIF),
+ ('MODEL_IDENTIFIER_MACRO_PROGMA', MODEL_IDENTIFIER_MACRO_PROGMA),
+ ('MODEL_IDENTIFIER_FUNCTION_CALLING', MODEL_IDENTIFIER_FUNCTION_CALLING),
+ ('MODEL_IDENTIFIER_TYPEDEF', MODEL_IDENTIFIER_TYPEDEF),
+ ('MODEL_IDENTIFIER_FUNCTION_DECLARATION', MODEL_IDENTIFIER_FUNCTION_DECLARATION),
+ ('MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION', MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION),
+ ('MODEL_EFI_PROTOCOL', MODEL_EFI_PROTOCOL),
+ ('MODEL_EFI_PPI', MODEL_EFI_PPI),
+ ('MODEL_EFI_GUID', MODEL_EFI_GUID),
+ ('MODEL_EFI_LIBRARY_CLASS', MODEL_EFI_LIBRARY_CLASS),
+ ('MODEL_EFI_LIBRARY_INSTANCE', MODEL_EFI_LIBRARY_INSTANCE),
+ ('MODEL_EFI_PCD', MODEL_EFI_PCD),
+ ('MODEL_EFI_SKU_ID', MODEL_EFI_SKU_ID),
+ ('MODEL_EFI_INCLUDE', MODEL_EFI_INCLUDE),
+ ('MODEL_EFI_DEPEX', MODEL_EFI_DEPEX),
+ ('MODEL_EFI_SOURCE_FILE', MODEL_EFI_SOURCE_FILE),
+ ('MODEL_EFI_BINARY_FILE', MODEL_EFI_BINARY_FILE),
+ ('MODEL_PCD', MODEL_PCD),
+ ('MODEL_PCD_FIXED_AT_BUILD', MODEL_PCD_FIXED_AT_BUILD),
+ ('MODEL_PCD_PATCHABLE_IN_MODULE', MODEL_PCD_PATCHABLE_IN_MODULE),
+ ('MODEL_PCD_FEATURE_FLAG', MODEL_PCD_FEATURE_FLAG),
+ ('MODEL_PCD_DYNAMIC_EX', MODEL_PCD_DYNAMIC_EX),
+ ('MODEL_PCD_DYNAMIC_EX_DEFAULT', MODEL_PCD_DYNAMIC_EX_DEFAULT),
+ ('MODEL_PCD_DYNAMIC_EX_VPD', MODEL_PCD_DYNAMIC_EX_VPD),
+ ('MODEL_PCD_DYNAMIC_EX_HII', MODEL_PCD_DYNAMIC_EX_HII),
+ ('MODEL_PCD_DYNAMIC', MODEL_PCD_DYNAMIC),
+ ('MODEL_PCD_DYNAMIC_DEFAULT', MODEL_PCD_DYNAMIC_DEFAULT),
+ ('MODEL_PCD_DYNAMIC_VPD', MODEL_PCD_DYNAMIC_VPD),
+ ('MODEL_PCD_DYNAMIC_HII', MODEL_PCD_DYNAMIC_HII),
+ ("MODEL_META_DATA_HEADER", MODEL_META_DATA_HEADER),
+ ("MODEL_META_DATA_INCLUDE", MODEL_META_DATA_INCLUDE),
+ ("MODEL_META_DATA_DEFINE", MODEL_META_DATA_DEFINE),
+ ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IF),
+ ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE", MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE),
+ ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF),
+ ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF),
+ ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR", MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR),
+ ("MODEL_META_DATA_BUILD_OPTION", MODEL_META_DATA_BUILD_OPTION),
+ ("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
+ ('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
+ ('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
+ ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
+ ('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
+ ]
+
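+# MODEL_LIST maps symbolic names to model ids; for illustration:
+#   ModelDict = dict(MODEL_LIST)
+#   assert ModelDict['MODEL_FILE_C'] == MODEL_FILE_C
+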
+## FunctionClass
+#
+# This class defines a structure of a function
+#
+# @param ID: ID of a Function
+# @param Header: Header of a Function
+# @param Modifier: Modifier of a Function
+# @param Name: Name of a Function
+# @param ReturnStatement: ReturnStatement of a Function
+# @param StartLine: StartLine of a Function
+# @param StartColumn: StartColumn of a Function
+# @param EndLine: EndLine of a Function
+# @param EndColumn: EndColumn of a Function
+# @param BodyStartLine: BodyStartLine of a Function Body
+# @param BodyStartColumn: BodyStartColumn of a Function Body
+# @param BelongsToFile: The Function belongs to which file
+# @param IdentifierList: IdentifierList of a File
+# @param PcdList: PcdList of a File
+#
+# @var ID: ID of a Function
+# @var Header: Header of a Function
+# @var Modifier: Modifier of a Function
+# @var Name: Name of a Function
+# @var ReturnStatement: ReturnStatement of a Function
+# @var StartLine: StartLine of a Function
+# @var StartColumn: StartColumn of a Function
+# @var EndLine: EndLine of a Function
+# @var EndColumn: EndColumn of a Function
+# @var BodyStartLine: StartLine of a Function Body
+# @var BodyStartColumn: StartColumn of a Function Body
+# @var BelongsToFile: The Function belongs to which file
+# @var IdentifierList: IdentifierList of a File
+# @var PcdList: PcdList of a File
+#
+class FunctionClass(object):
+ def __init__(self, ID = -1, Header = '', Modifier = '', Name = '', ReturnStatement = '', \
+ StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1, \
+ BodyStartLine = -1, BodyStartColumn = -1, BelongsToFile = -1, \
+ IdentifierList = [], PcdList = [], \
+ FunNameStartLine = -1, FunNameStartColumn = -1):
+ self.ID = ID
+ self.Header = Header
+ self.Modifier = Modifier
+ self.Name = Name
+ self.ReturnStatement = ReturnStatement
+ self.StartLine = StartLine
+ self.StartColumn = StartColumn
+ self.EndLine = EndLine
+ self.EndColumn = EndColumn
+ self.BodyStartLine = BodyStartLine
+ self.BodyStartColumn = BodyStartColumn
+ self.BelongsToFile = BelongsToFile
+ self.FunNameStartLine = FunNameStartLine
+ self.FunNameStartColumn = FunNameStartColumn
+
+ self.IdentifierList = IdentifierList
+ self.PcdList = PcdList
+
+## IdentifierClass
+#
+# This class defines a structure of a variable
+#
+# @param ID:                 ID of an Identifier
+# @param Modifier:           Modifier of an Identifier
+# @param Type:               Type of an Identifier
+# @param Name:               Name of an Identifier
+# @param Value:              Value of an Identifier
+# @param Model:              Model of an Identifier
+# @param BelongsToFile:      The file the Identifier belongs to
+# @param BelongsToFunction:  The function the Identifier belongs to
+# @param StartLine:          StartLine of an Identifier
+# @param StartColumn:        StartColumn of an Identifier
+# @param EndLine:            EndLine of an Identifier
+# @param EndColumn:          EndColumn of an Identifier
+#
+# @var ID:                 ID of an Identifier
+# @var Modifier:           Modifier of an Identifier
+# @var Type:               Type of an Identifier
+# @var Name:               Name of an Identifier
+# @var Value:              Value of an Identifier
+# @var Model:              Model of an Identifier
+# @var BelongsToFile:      The file the Identifier belongs to
+# @var BelongsToFunction:  The function the Identifier belongs to
+# @var StartLine:          StartLine of an Identifier
+# @var StartColumn:        StartColumn of an Identifier
+# @var EndLine:            EndLine of an Identifier
+# @var EndColumn:          EndColumn of an Identifier
+#
+class IdentifierClass(object):
+ def __init__(self, ID = -1, Modifier = '', Type = '', Name = '', Value = '', Model = MODEL_UNKNOWN, \
+ BelongsToFile = -1, BelongsToFunction = -1, StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1):
+ self.ID = ID
+ self.Modifier = Modifier
+ self.Type = Type
+ self.Name = Name
+ self.Value = Value
+ self.Model = Model
+ self.BelongsToFile = BelongsToFile
+ self.BelongsToFunction = BelongsToFunction
+ self.StartLine = StartLine
+ self.StartColumn = StartColumn
+ self.EndLine = EndLine
+ self.EndColumn = EndColumn
+
+## PcdDataClass
+#
+# This class defines a structure of a Pcd
+#
+# @param ID: ID of a Pcd
+# @param CName: CName of a Pcd
+# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
+# @param Token: Token of a Pcd
+# @param DatumType: DatumType of a Pcd
+# @param Model: Model of a Pcd
+# @param BelongsToFile: The Pcd belongs to which file
+# @param BelongsToFunction: The Pcd belongs to which function
+# @param StartLine: StartLine of a Pcd
+# @param StartColumn: StartColumn of a Pcd
+# @param EndLine: EndLine of a Pcd
+# @param EndColumn: EndColumn of a Pcd
+#
+# @var ID: ID of a Pcd
+# @var CName: CName of a Pcd
+# @var TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
+# @var Token: Token of a Pcd
+# @var DatumType: DatumType of a Pcd
+# @var Model: Model of a Pcd
+# @var BelongsToFile: The Pcd belongs to which file
+# @var BelongsToFunction: The Pcd belongs to which function
+# @var StartLine: StartLine of a Pcd
+# @var StartColumn: StartColumn of a Pcd
+# @var EndLine: EndLine of a Pcd
+# @var EndColumn: EndColumn of a Pcd
+#
+class PcdDataClass(object):
+ def __init__(self, ID = -1, CName = '', TokenSpaceGuidCName = '', Token = '', DatumType = '', Model = MODEL_UNKNOWN, \
+ BelongsToFile = -1, BelongsToFunction = -1, StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1):
+ self.ID = ID
+ self.CName = CName
+ self.TokenSpaceGuidCName = TokenSpaceGuidCName
+ self.Token = Token
+        self.DatumType = DatumType
+        self.Model = Model
+        self.BelongsToFile = BelongsToFile
+ self.BelongsToFunction = BelongsToFunction
+ self.StartLine = StartLine
+ self.StartColumn = StartColumn
+ self.EndLine = EndLine
+ self.EndColumn = EndColumn
+
+## FileClass
+#
+# This class defines a structure of a file
+#
+# @param ID: ID of a File
+# @param Name: Name of a File
+# @param ExtName: ExtName of a File
+# @param Path: Path of a File
+# @param FullPath: FullPath of a File
+# @param Model: Model of a File
+# @param TimeStamp: TimeStamp of a File
+# @param FunctionList: FunctionList of a File
+# @param IdentifierList: IdentifierList of a File
+# @param PcdList: PcdList of a File
+#
+# @var ID: ID of a File
+# @var Name: Name of a File
+# @var ExtName: ExtName of a File
+# @var Path: Path of a File
+# @var FullPath: FullPath of a File
+# @var Model: Model of a File
+# @var TimeStamp: TimeStamp of a File
+# @var FunctionList: FunctionList of a File
+# @var IdentifierList: IdentifierList of a File
+# @var PcdList: PcdList of a File
+#
+class FileClass(object):
+ def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
+ FunctionList = [], IdentifierList = [], PcdList = []):
+ self.ID = ID
+ self.Name = Name
+ self.ExtName = ExtName
+ self.Path = Path
+ self.FullPath = FullPath
+ self.Model = Model
+ self.TimeStamp = TimeStamp
+
+ self.FunctionList = FunctionList
+ self.IdentifierList = IdentifierList
+ self.PcdList = PcdList
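+
+# Illustrative usage sketch (IDs and names are hypothetical):
+#
+#   Ident = IdentifierClass(ID = 1, Type = 'UINT32', Name = 'Status',
+#                           Model = MODEL_IDENTIFIER_VARIABLE, BelongsToFile = 1)
+#   File = FileClass(ID = 1, Name = 'Sample.c', ExtName = 'c',
+#                    Model = MODEL_FILE_C, IdentifierList = [Ident])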
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/Exceptions.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/Exceptions.py
new file mode 100644
index 00000000..c0b5e460
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/Exceptions.py
@@ -0,0 +1,23 @@
+## @file
+# This file is used to define common Exceptions class used in python tools
+#
+# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+## Exceptions used in Expression
+class EvaluationException(Exception):
+ pass
+
+class BadExpression(EvaluationException):
+ pass
+
+class WrnExpression(Exception):
+ pass
+
+## Exceptions used in macro replacements
+class MacroException(Exception):
+ pass
+
+class SymbolNotFound(MacroException):
+ pass
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/FdfClass.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/FdfClass.py
new file mode 100755
index 00000000..8e70121f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/FdfClass.py
@@ -0,0 +1,312 @@
+## @file
+# classes represent data in FDF
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## FD data in FDF
+#
+#
+class FDClassObject:
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.FdUiName = ''
+ self.CreateFileName = None
+ self.BaseAddress = None
+ self.BaseAddressPcd = None
+ self.Size = None
+ self.SizePcd = None
+ self.ErasePolarity = None
+ # 3-tuple list (blockSize, numBlocks, pcd)
+ self.BlockSizeList = []
+ # DefineVarDict[var] = value
+ self.DefineVarDict = {}
+ # SetVarDict[var] = value
+ self.SetVarDict = {}
+ self.RegionList = []
+
+## FFS data in FDF
+#
+#
+class FfsClassObject:
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.NameGuid = None
+ self.Fixed = False
+ self.CheckSum = False
+ self.Alignment = None
+ self.SectionList = []
+
+## FILE statement data in FDF
+#
+#
+class FileStatementClassObject (FfsClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FfsClassObject.__init__(self)
+ self.FvFileType = None
+ self.FileName = None
+ self.KeyStringList = []
+ self.FvName = None
+ self.FdName = None
+ self.DefineVarDict = {}
+ self.KeepReloc = None
+
+## INF statement data in FDF
+#
+#
+class FfsInfStatementClassObject(FfsClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FfsClassObject.__init__(self)
+ self.Rule = None
+ self.Version = None
+ self.Ui = None
+ self.InfFileName = None
+ self.BuildNum = ''
+ self.KeyStringList = []
+ self.KeepReloc = None
+ self.UseArch = None
+
+## section data in FDF
+#
+#
+class SectionClassObject:
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.Alignment = None
+
+## Depex expression section in FDF
+#
+#
+class DepexSectionClassObject (SectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.DepexType = None
+ self.Expression = None
+ self.ExpressionProcessed = False
+
+## Compress section data in FDF
+#
+#
+class CompressSectionClassObject (SectionClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.CompType = None
+ self.SectionList = []
+
+## Data section data in FDF
+#
+#
+class DataSectionClassObject (SectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.SecType = None
+ self.SectFileName = None
+ self.SectionList = []
+ self.KeepReloc = True
+
+## Rule section data in FDF
+#
+#
+class EfiSectionClassObject (SectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.SectionType = None
+ self.Optional = False
+ self.FileType = None
+ self.StringData = None
+ self.FileName = None
+ self.FileExtension = None
+ self.BuildNum = None
+ self.KeepReloc = None
+
+## FV image section data in FDF
+#
+#
+class FvImageSectionClassObject (SectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.Fv = None
+ self.FvName = None
+ self.FvFileType = None
+ self.FvFileName = None
+ self.FvFileExtension = None
+ self.FvAddr = None
+
+## GUIDed section data in FDF
+#
+#
+class GuidSectionClassObject (SectionClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.NameGuid = None
+ self.SectionList = []
+ self.SectionType = None
+ self.ProcessRequired = False
+ self.AuthStatusValid = False
+ self.ExtraHeaderSize = -1
+ self.FvAddr = []
+ self.FvParentAddr = None
+ self.IncludeFvSection = False
+
+## UI section data in FDF
+#
+#
+class UiSectionClassObject (SectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.StringData = None
+ self.FileName = None
+
+## Version section data in FDF
+#
+#
+class VerSectionClassObject (SectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+ self.BuildNum = None
+ self.StringData = None
+ self.FileName = None
+
+## Rule data in FDF
+#
+#
+class RuleClassObject :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.Arch = None
+ self.ModuleType = None # For Module Type
+ self.TemplateName = None
+ self.NameGuid = None
+ self.Fixed = False
+ self.Alignment = None
+ self.SectAlignment = None
+ self.CheckSum = False
+ self.FvFileType = None # for Ffs File Type
+ self.KeyStringList = []
+ self.KeepReloc = None
+
+## Complex rule data in FDF
+#
+#
+class RuleComplexFileClassObject(RuleClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ RuleClassObject.__init__(self)
+ self.SectionList = []
+
+## Simple rule data in FDF
+#
+#
+class RuleSimpleFileClassObject(RuleClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ RuleClassObject.__init__(self)
+ self.FileName = None
+ self.SectionType = ''
+ self.FileExtension = None
+
+## File extension rule data in FDF
+#
+#
+class RuleFileExtensionClassObject(RuleClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ RuleClassObject.__init__(self)
+ self.FileExtension = None
+
+## Capsule data in FDF
+#
+#
+class CapsuleClassObject :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.SpecName = None
+ self.UiCapsuleName = None
+ self.CreateFile = None
+ self.GroupIdNumber = None
+ # DefineVarDict[var] = value
+ self.DefineVarDict = {}
+ # SetVarDict[var] = value
+ self.SetVarDict = {}
+ # TokensDict[var] = value
+ self.TokensDict = {}
+ self.CapsuleDataList = []
+ self.FmpPayloadList = []
+
+## OptionROM data in FDF
+#
+#
+class OptionRomClassObject:
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.DriverName = None
+ self.FfsList = []
+
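+# Illustrative usage sketch (field values are hypothetical):
+#
+#   Fd = FDClassObject()
+#   Fd.FdUiName = 'FV_MAIN'
+#   Fd.BaseAddress = '0xFFF00000'
+#   Fd.BlockSizeList.append((0x1000, 256, None))   # (blockSize, numBlocks, pcd)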
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/__init__.py
new file mode 100644
index 00000000..1b874ac8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/CommonDataClass/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'CommonDataClass' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/C.g b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/C.g
new file mode 100755
index 00000000..232e0aa5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/C.g
@@ -0,0 +1,673 @@
+/* @file
+   This file is the grammar file of the ECC tool
+
+ Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.<BR>
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+*/
+
+grammar C;
+options {
+ language=Python;
+ backtrack=true;
+ memoize=true;
+ k=2;
+}
+
+@lexer::header{
+## @file
+# The file defines the Lexer for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at:
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+##
+}
+
+@header {
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at:
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+##
+
+import CodeFragment
+import FileProfile
+}
+
+@members {
+
+ def printTokenInfo(self, line, offset, tokenText):
+ print str(line)+ ',' + str(offset) + ':' + str(tokenText)
+
+ def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+ def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+ def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+ def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+ def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+ def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+ def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
+}
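+
+/* For illustration (hypothetical input): parsing "Status = Foo (1, 2);" invokes
+   StoreFunctionCalling with FuncName 'Foo' and ParamList '1, 2'; line numbers are
+   1-based and charPositionInLine offsets are 0-based in the ANTLR runtime. */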
+
+translation_unit
+ : external_declaration*
+ ;
+
+
+/*function_declaration
+@after{
+ print $function_declaration.text
+}
+ : declaration_specifiers IDENTIFIER '(' parameter_list ')' ';'
+ ;
+*/
+external_declaration
+options {k=1;}
+/*@after{
+ print $external_declaration.text
+}*/
+ : ( declaration_specifiers? declarator declaration* '{' )=> function_definition
+ | declaration
+ | macro_statement (';')?
+ ;
+
+
+
+function_definition
+scope {
+ ModifierText;
+ DeclText;
+ LBLine;
+ LBOffset;
+ DeclLine;
+ DeclOffset;
+}
+@init {
+ $function_definition::ModifierText = '';
+ $function_definition::DeclText = '';
+ $function_definition::LBLine = 0;
+ $function_definition::LBOffset = 0;
+ $function_definition::DeclLine = 0;
+ $function_definition::DeclOffset = 0;
+}
+@after{
+ self.StoreFunctionDefinition($function_definition.start.line, $function_definition.start.charPositionInLine, $function_definition.stop.line, $function_definition.stop.charPositionInLine, $function_definition::ModifierText, $function_definition::DeclText, $function_definition::LBLine, $function_definition::LBOffset, $function_definition::DeclLine, $function_definition::DeclOffset)
+}
+ : d=declaration_specifiers? declarator
+ ( declaration+ a=compound_statement // K&R style
+ | b=compound_statement // ANSI style
+ ) {
+ if d != None:
+ $function_definition::ModifierText = $declaration_specifiers.text
+ else:
+ $function_definition::ModifierText = ''
+ $function_definition::DeclText = $declarator.text
+ $function_definition::DeclLine = $declarator.start.line
+ $function_definition::DeclOffset = $declarator.start.charPositionInLine
+ if a != None:
+ $function_definition::LBLine = $a.start.line
+ $function_definition::LBOffset = $a.start.charPositionInLine
+ else:
+ $function_definition::LBLine = $b.start.line
+ $function_definition::LBOffset = $b.start.charPositionInLine
+ }
+ ;
+
+declaration
+ : a='typedef' b=declaration_specifiers?
+ c=init_declarator_list d=';'
+ {
+ if b != None:
+ self.StoreTypedefDefinition($a.line, $a.charPositionInLine, $d.line, $d.charPositionInLine, $b.text, $c.text)
+ else:
+ self.StoreTypedefDefinition($a.line, $a.charPositionInLine, $d.line, $d.charPositionInLine, '', $c.text)
+ }
+ | s=declaration_specifiers t=init_declarator_list? e=';'
+ {
+ if t != None:
+ self.StoreVariableDeclaration($s.start.line, $s.start.charPositionInLine, $t.start.line, $t.start.charPositionInLine, $s.text, $t.text)
+ }
+ ;
+
+declaration_specifiers
+ : ( storage_class_specifier
+ | type_specifier
+ | type_qualifier
+ )+
+ ;
+
+init_declarator_list
+ : init_declarator (',' init_declarator)*
+ ;
+
+init_declarator
+ : declarator ('=' initializer)?
+ ;
+
+storage_class_specifier
+ : 'extern'
+ | 'static'
+ | 'auto'
+ | 'register'
+ | 'STATIC'
+ ;
+
+type_specifier
+ : 'void'
+ | 'char'
+ | 'short'
+ | 'int'
+ | 'long'
+ | 'float'
+ | 'double'
+ | 'signed'
+ | 'unsigned'
+ | s=struct_or_union_specifier
+ {
+ if s.stop != None:
+ self.StoreStructUnionDefinition($s.start.line, $s.start.charPositionInLine, $s.stop.line, $s.stop.charPositionInLine, $s.text)
+ }
+ | e=enum_specifier
+ {
+ if e.stop != None:
+ self.StoreEnumerationDefinition($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)
+ }
+ | (IDENTIFIER type_qualifier* declarator)=> type_id
+ ;
+
+type_id
+ : IDENTIFIER
+ //{self.printTokenInfo($a.line, $a.pos, $a.text)}
+ ;
+
+struct_or_union_specifier
+options {k=3;}
+ : struct_or_union IDENTIFIER? '{' struct_declaration_list '}'
+ | struct_or_union IDENTIFIER
+ ;
+
+struct_or_union
+ : 'struct'
+ | 'union'
+ ;
+
+struct_declaration_list
+ : struct_declaration+
+ ;
+
+struct_declaration
+ : specifier_qualifier_list struct_declarator_list ';'
+ ;
+
+specifier_qualifier_list
+ : ( type_qualifier | type_specifier )+
+ ;
+
+struct_declarator_list
+ : struct_declarator (',' struct_declarator)*
+ ;
+
+struct_declarator
+ : declarator (':' constant_expression)?
+ | ':' constant_expression
+ ;
+
+enum_specifier
+options {k=3;}
+ : 'enum' '{' enumerator_list ','? '}'
+ | 'enum' IDENTIFIER '{' enumerator_list ','? '}'
+ | 'enum' IDENTIFIER
+ ;
+
+enumerator_list
+ : enumerator (',' enumerator)*
+ ;
+
+enumerator
+ : IDENTIFIER ('=' constant_expression)?
+ ;
+
+type_qualifier
+ : 'const'
+ | 'volatile'
+ | 'IN'
+ | 'OUT'
+ | 'OPTIONAL'
+ | 'CONST'
+ | 'UNALIGNED'
+ | 'VOLATILE'
+ | 'GLOBAL_REMOVE_IF_UNREFERENCED'
+ | 'EFIAPI'
+ | 'EFI_BOOTSERVICE'
+ | 'EFI_RUNTIMESERVICE'
+ | 'PACKED'
+ ;
+
+declarator
+ : pointer? ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? direct_declarator
+// | ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? pointer? direct_declarator
+ | pointer
+ ;
+
+direct_declarator
+ : IDENTIFIER declarator_suffix*
+ | '(' ('EFIAPI')? declarator ')' declarator_suffix+
+ ;
+
+declarator_suffix
+ : '[' constant_expression ']'
+ | '[' ']'
+ | '(' parameter_type_list ')'
+ | '(' identifier_list ')'
+ | '(' ')'
+ ;
+
+pointer
+ : '*' type_qualifier+ pointer?
+ | '*' pointer
+ | '*'
+ ;
+
+parameter_type_list
+ : parameter_list (',' ('OPTIONAL')? '...')?
+ ;
+
+parameter_list
+ : parameter_declaration (',' ('OPTIONAL')? parameter_declaration)*
+ ;
+
+parameter_declaration
+ : declaration_specifiers (declarator|abstract_declarator)* ('OPTIONAL')?
+    // accommodate user-defined types only; no declarator follows.
+ | pointer* IDENTIFIER
+ ;
+
+identifier_list
+ : IDENTIFIER
+ (',' IDENTIFIER)*
+ ;
+
+type_name
+ : specifier_qualifier_list abstract_declarator?
+ | type_id
+ ;
+
+abstract_declarator
+ : pointer direct_abstract_declarator?
+ | direct_abstract_declarator
+ ;
+
+direct_abstract_declarator
+ : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) abstract_declarator_suffix*
+ ;
+
+abstract_declarator_suffix
+ : '[' ']'
+ | '[' constant_expression ']'
+ | '(' ')'
+ | '(' parameter_type_list ')'
+ ;
+
+initializer
+
+ : assignment_expression
+ | '{' initializer_list ','? '}'
+ ;
+
+initializer_list
+ : initializer (',' initializer )*
+ ;
+
+// E x p r e s s i o n s
+
+argument_expression_list
+ : assignment_expression ('OPTIONAL')? (',' assignment_expression ('OPTIONAL')?)*
+ ;
+
+additive_expression
+ : (multiplicative_expression) ('+' multiplicative_expression | '-' multiplicative_expression)*
+ ;
+
+multiplicative_expression
+ : (cast_expression) ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
+ ;
+
+cast_expression
+ : '(' type_name ')' cast_expression
+ | unary_expression
+ ;
+
+unary_expression
+ : postfix_expression
+ | '++' unary_expression
+ | '--' unary_expression
+ | unary_operator cast_expression
+ | 'sizeof' unary_expression
+ | 'sizeof' '(' type_name ')'
+ ;
+
+postfix_expression
+scope {
+ FuncCallText;
+}
+@init {
+ $postfix_expression::FuncCallText = '';
+}
+ : p=primary_expression {$postfix_expression::FuncCallText += $p.text}
+ ( '[' expression ']'
+ | '(' a=')'{self.StoreFunctionCalling($p.start.line, $p.start.charPositionInLine, $a.line, $a.charPositionInLine, $postfix_expression::FuncCallText, '')}
+ | '(' c=argument_expression_list b=')' {self.StoreFunctionCalling($p.start.line, $p.start.charPositionInLine, $b.line, $b.charPositionInLine, $postfix_expression::FuncCallText, $c.text)}
+ | '(' macro_parameter_list ')'
+ | '.' x=IDENTIFIER {$postfix_expression::FuncCallText += '.' + $x.text}
+ | '*' y=IDENTIFIER {$postfix_expression::FuncCallText = $y.text}
+ | '->' z=IDENTIFIER {$postfix_expression::FuncCallText += '->' + $z.text}
+ | '++'
+ | '--'
+ )*
+ ;
+
+macro_parameter_list
+ : parameter_declaration (',' parameter_declaration)*
+ ;
+
+unary_operator
+ : '&'
+ | '*'
+ | '+'
+ | '-'
+ | '~'
+ | '!'
+ ;
+
+primary_expression
+ : IDENTIFIER
+ | constant
+ | '(' expression ')'
+ ;
+
+constant
+ : HEX_LITERAL
+ | OCTAL_LITERAL
+ | DECIMAL_LITERAL
+ | CHARACTER_LITERAL
+ | (IDENTIFIER* STRING_LITERAL+)+ IDENTIFIER*
+ | FLOATING_POINT_LITERAL
+ ;
+
+/////
+
+expression
+ : assignment_expression (',' assignment_expression)*
+ ;
+
+constant_expression
+ : conditional_expression
+ ;
+
+assignment_expression
+ : lvalue assignment_operator assignment_expression
+ | conditional_expression
+ ;
+
+lvalue
+ : unary_expression
+ ;
+
+assignment_operator
+ : '='
+ | '*='
+ | '/='
+ | '%='
+ | '+='
+ | '-='
+ | '<<='
+ | '>>='
+ | '&='
+ | '^='
+ | '|='
+ ;
+
+conditional_expression
+ : e=logical_or_expression ('?' expression ':' conditional_expression {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)})?
+ ;
+
+logical_or_expression
+ : logical_and_expression ('||' logical_and_expression)*
+ ;
+
+logical_and_expression
+ : inclusive_or_expression ('&&' inclusive_or_expression)*
+ ;
+
+inclusive_or_expression
+ : exclusive_or_expression ('|' exclusive_or_expression)*
+ ;
+
+exclusive_or_expression
+ : and_expression ('^' and_expression)*
+ ;
+
+and_expression
+ : equality_expression ('&' equality_expression)*
+ ;
+equality_expression
+ : relational_expression (('=='|'!=') relational_expression )*
+ ;
+
+relational_expression
+ : shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
+ ;
+
+shift_expression
+ : additive_expression (('<<'|'>>') additive_expression)*
+ ;
+
+// S t a t e m e n t s
+
+statement
+ : labeled_statement
+ | compound_statement
+ | expression_statement
+ | selection_statement
+ | iteration_statement
+ | jump_statement
+ | macro_statement
+ | asm2_statement
+ | asm1_statement
+ | asm_statement
+ | declaration
+ ;
+
+asm2_statement
+ : '__asm__'? IDENTIFIER '(' (~(';'))* ')' ';'
+ ;
+
+asm1_statement
+ : '_asm' '{' (~('}'))* '}'
+ ;
+
+asm_statement
+ : '__asm' '{' (~('}'))* '}'
+ ;
+
+macro_statement
+ : IDENTIFIER '(' declaration* statement_list? expression? ')'
+ ;
+
+labeled_statement
+ : IDENTIFIER ':' statement
+ | 'case' constant_expression ':' statement
+ | 'default' ':' statement
+ ;
+
+compound_statement
+ : '{' declaration* statement_list? '}'
+ ;
+
+statement_list
+ : statement+
+ ;
+
+expression_statement
+ : ';'
+ | expression ';'
+ ;
+
+selection_statement
+ : 'if' '(' e=expression ')' {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)} statement (options {k=1; backtrack=false;}:'else' statement)?
+ | 'switch' '(' expression ')' statement
+ ;
+
+iteration_statement
+ : 'while' '(' e=expression ')' statement {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)}
+ | 'do' statement 'while' '(' e=expression ')' ';' {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)}
+ | 'for' '(' expression_statement e=expression_statement expression? ')' statement {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)}
+ ;
+
+jump_statement
+ : 'goto' IDENTIFIER ';'
+ | 'continue' ';'
+ | 'break' ';'
+ | 'return' ';'
+ | 'return' expression ';'
+ ;
+
+IDENTIFIER
+ : LETTER (LETTER|'0'..'9')*
+ ;
+
+fragment
+LETTER
+ : '$'
+ | 'A'..'Z'
+ | 'a'..'z'
+ | '_'
+ ;
+
+CHARACTER_LITERAL
+ : ('L')? '\'' ( EscapeSequence | ~('\''|'\\') ) '\''
+ ;
+
+STRING_LITERAL
+ : ('L')? '"' ( EscapeSequence | ~('\\'|'"') )* '"'
+ ;
+
+HEX_LITERAL : '0' ('x'|'X') HexDigit+ IntegerTypeSuffix? ;
+
+DECIMAL_LITERAL : ('0' | '1'..'9' '0'..'9'*) IntegerTypeSuffix? ;
+
+OCTAL_LITERAL : '0' ('0'..'7')+ IntegerTypeSuffix? ;
+
+fragment
+HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;
+
+fragment
+IntegerTypeSuffix
+ : ('u'|'U')
+ | ('l'|'L')
+ | ('u'|'U') ('l'|'L')
+ | ('u'|'U') ('l'|'L') ('l'|'L')
+ ;
+
+FLOATING_POINT_LITERAL
+ : ('0'..'9')+ '.' ('0'..'9')* Exponent? FloatTypeSuffix?
+ | '.' ('0'..'9')+ Exponent? FloatTypeSuffix?
+ | ('0'..'9')+ Exponent FloatTypeSuffix?
+ | ('0'..'9')+ Exponent? FloatTypeSuffix
+ ;
+
+fragment
+Exponent : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;
+
+fragment
+FloatTypeSuffix : ('f'|'F'|'d'|'D') ;
+
+fragment
+EscapeSequence
+ : '\\' ('b'|'t'|'n'|'f'|'r'|'\"'|'\''|'\\')
+ | OctalEscape
+ ;
+
+fragment
+OctalEscape
+ : '\\' ('0'..'3') ('0'..'7') ('0'..'7')
+ | '\\' ('0'..'7') ('0'..'7')
+ | '\\' ('0'..'7')
+ ;
+
+fragment
+UnicodeEscape
+ : '\\' 'u' HexDigit HexDigit HexDigit HexDigit
+ ;
+
+WS : (' '|'\r'|'\t'|'\u000C'|'\n') {$channel=HIDDEN;}
+ ;
+
+// ignore '\' of line concatenation
+BS : ('\\') {$channel=HIDDEN;}
+ ;
+
+// ignore function modifiers
+//FUNC_MODIFIERS : 'EFIAPI' {$channel=HIDDEN;}
+// ;
+
+UnicodeVocabulary
+ : '\u0003'..'\uFFFE'
+ ;
+COMMENT
+ : '/*' ( options {greedy=false;} : . )* '*/' {$channel=HIDDEN;}
+ ;
+
+
+LINE_COMMENT
+ : '//' ~('\n'|'\r')* '\r'? '\n' {$channel=HIDDEN;}
+ ;
+
+// ignore #line info for now
+LINE_COMMAND
+ : '#' ~('\n'|'\r')* '\r'? '\n' {$channel=HIDDEN;}
+ ;
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CLexer.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CLexer.py
new file mode 100755
index 00000000..f57d8e1e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CLexer.py
@@ -0,0 +1,4941 @@
+# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
+
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+## @file
+# The file defines the Lexer for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+T114=114
+T115=115
+T116=116
+T117=117
+FloatTypeSuffix=16
+LETTER=11
+T29=29
+T28=28
+T27=27
+T26=26
+T25=25
+EOF=-1
+STRING_LITERAL=9
+FLOATING_POINT_LITERAL=10
+T38=38
+T37=37
+T39=39
+T34=34
+COMMENT=22
+T33=33
+T36=36
+T35=35
+T30=30
+T32=32
+T31=31
+LINE_COMMENT=23
+IntegerTypeSuffix=14
+CHARACTER_LITERAL=8
+T49=49
+T48=48
+T100=100
+T43=43
+T42=42
+T102=102
+T41=41
+T101=101
+T40=40
+T47=47
+T46=46
+T45=45
+T44=44
+T109=109
+T107=107
+T108=108
+T105=105
+WS=19
+T106=106
+T103=103
+T104=104
+T50=50
+LINE_COMMAND=24
+T59=59
+T113=113
+T52=52
+T112=112
+T51=51
+T111=111
+T54=54
+T110=110
+EscapeSequence=12
+DECIMAL_LITERAL=7
+T53=53
+T56=56
+T55=55
+T58=58
+T57=57
+T75=75
+T76=76
+T73=73
+T74=74
+T79=79
+T77=77
+T78=78
+Exponent=15
+HexDigit=13
+T72=72
+T71=71
+T70=70
+T62=62
+T63=63
+T64=64
+T65=65
+T66=66
+T67=67
+T68=68
+T69=69
+IDENTIFIER=4
+UnicodeVocabulary=21
+HEX_LITERAL=5
+T61=61
+T60=60
+T99=99
+T97=97
+BS=20
+T98=98
+T95=95
+T96=96
+OCTAL_LITERAL=6
+T94=94
+Tokens=118
+T93=93
+T92=92
+T91=91
+T90=90
+T88=88
+T89=89
+T84=84
+T85=85
+T86=86
+T87=87
+UnicodeEscape=18
+T81=81
+T80=80
+T83=83
+OctalEscape=17
+T82=82
+
+class CLexer(Lexer):
+
+ grammarFileName = "C.g"
+
+ def __init__(self, input=None):
+ Lexer.__init__(self, input)
+ self.dfa25 = self.DFA25(
+ self, 25,
+ eot = self.DFA25_eot,
+ eof = self.DFA25_eof,
+ min = self.DFA25_min,
+ max = self.DFA25_max,
+ accept = self.DFA25_accept,
+ special = self.DFA25_special,
+ transition = self.DFA25_transition
+ )
+ self.dfa35 = self.DFA35(
+ self, 35,
+ eot = self.DFA35_eot,
+ eof = self.DFA35_eof,
+ min = self.DFA35_min,
+ max = self.DFA35_max,
+ accept = self.DFA35_accept,
+ special = self.DFA35_special,
+ transition = self.DFA35_transition
+ )
+
+
+
+
+
+
+ # $ANTLR start T25
+ def mT25(self, ):
+
+ try:
+ self.type = T25
+
+ # C.g:27:5: ( ';' )
+ # C.g:27:7: ';'
+ self.match(u';')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T25
+
+
+
+ # $ANTLR start T26
+ def mT26(self, ):
+
+ try:
+ self.type = T26
+
+ # C.g:28:5: ( 'typedef' )
+ # C.g:28:7: 'typedef'
+ self.match("typedef")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T26
+
+
+
+ # $ANTLR start T27
+ def mT27(self, ):
+
+ try:
+ self.type = T27
+
+ # C.g:29:5: ( ',' )
+ # C.g:29:7: ','
+ self.match(u',')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T27
+
+
+
+ # $ANTLR start T28
+ def mT28(self, ):
+
+ try:
+ self.type = T28
+
+ # C.g:30:5: ( '=' )
+ # C.g:30:7: '='
+ self.match(u'=')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T28
+
+
+
+ # $ANTLR start T29
+ def mT29(self, ):
+
+ try:
+ self.type = T29
+
+ # C.g:31:5: ( 'extern' )
+ # C.g:31:7: 'extern'
+ self.match("extern")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T29
+
+
+
+ # $ANTLR start T30
+ def mT30(self, ):
+
+ try:
+ self.type = T30
+
+ # C.g:32:5: ( 'static' )
+ # C.g:32:7: 'static'
+ self.match("static")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T30
+
+
+
+ # $ANTLR start T31
+ def mT31(self, ):
+
+ try:
+ self.type = T31
+
+ # C.g:33:5: ( 'auto' )
+ # C.g:33:7: 'auto'
+ self.match("auto")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T31
+
+
+
+ # $ANTLR start T32
+ def mT32(self, ):
+
+ try:
+ self.type = T32
+
+ # C.g:34:5: ( 'register' )
+ # C.g:34:7: 'register'
+ self.match("register")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T32
+
+
+
+ # $ANTLR start T33
+ def mT33(self, ):
+
+ try:
+ self.type = T33
+
+ # C.g:35:5: ( 'STATIC' )
+ # C.g:35:7: 'STATIC'
+ self.match("STATIC")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T33
+
+
+
+ # $ANTLR start T34
+ def mT34(self, ):
+
+ try:
+ self.type = T34
+
+ # C.g:36:5: ( 'void' )
+ # C.g:36:7: 'void'
+ self.match("void")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T34
+
+
+
+ # $ANTLR start T35
+ def mT35(self, ):
+
+ try:
+ self.type = T35
+
+ # C.g:37:5: ( 'char' )
+ # C.g:37:7: 'char'
+ self.match("char")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T35
+
+
+
+ # $ANTLR start T36
+ def mT36(self, ):
+
+ try:
+ self.type = T36
+
+ # C.g:38:5: ( 'short' )
+ # C.g:38:7: 'short'
+ self.match("short")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T36
+
+
+
+ # $ANTLR start T37
+ def mT37(self, ):
+
+ try:
+ self.type = T37
+
+ # C.g:39:5: ( 'int' )
+ # C.g:39:7: 'int'
+ self.match("int")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T37
+
+
+
+ # $ANTLR start T38
+ def mT38(self, ):
+
+ try:
+ self.type = T38
+
+ # C.g:40:5: ( 'long' )
+ # C.g:40:7: 'long'
+ self.match("long")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T38
+
+
+
+ # $ANTLR start T39
+ def mT39(self, ):
+
+ try:
+ self.type = T39
+
+ # C.g:41:5: ( 'float' )
+ # C.g:41:7: 'float'
+ self.match("float")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T39
+
+
+
+ # $ANTLR start T40
+ def mT40(self, ):
+
+ try:
+ self.type = T40
+
+ # C.g:42:5: ( 'double' )
+ # C.g:42:7: 'double'
+ self.match("double")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T40
+
+
+
+ # $ANTLR start T41
+ def mT41(self, ):
+
+ try:
+ self.type = T41
+
+ # C.g:43:5: ( 'signed' )
+ # C.g:43:7: 'signed'
+ self.match("signed")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T41
+
+
+
+ # $ANTLR start T42
+ def mT42(self, ):
+
+ try:
+ self.type = T42
+
+ # C.g:44:5: ( 'unsigned' )
+ # C.g:44:7: 'unsigned'
+ self.match("unsigned")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T42
+
+
+
+ # $ANTLR start T43
+ def mT43(self, ):
+
+ try:
+ self.type = T43
+
+ # C.g:45:5: ( '{' )
+ # C.g:45:7: '{'
+ self.match(u'{')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T43
+
+
+
+ # $ANTLR start T44
+ def mT44(self, ):
+
+ try:
+ self.type = T44
+
+ # C.g:46:5: ( '}' )
+ # C.g:46:7: '}'
+ self.match(u'}')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T44
+
+
+
+ # $ANTLR start T45
+ def mT45(self, ):
+
+ try:
+ self.type = T45
+
+ # C.g:47:5: ( 'struct' )
+ # C.g:47:7: 'struct'
+ self.match("struct")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T45
+
+
+
+ # $ANTLR start T46
+ def mT46(self, ):
+
+ try:
+ self.type = T46
+
+ # C.g:48:5: ( 'union' )
+ # C.g:48:7: 'union'
+ self.match("union")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T46
+
+
+
+ # $ANTLR start T47
+ def mT47(self, ):
+
+ try:
+ self.type = T47
+
+ # C.g:49:5: ( ':' )
+ # C.g:49:7: ':'
+ self.match(u':')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T47
+
+
+
+ # $ANTLR start T48
+ def mT48(self, ):
+
+ try:
+ self.type = T48
+
+ # C.g:50:5: ( 'enum' )
+ # C.g:50:7: 'enum'
+ self.match("enum")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T48
+
+
+
+ # $ANTLR start T49
+ def mT49(self, ):
+
+ try:
+ self.type = T49
+
+ # C.g:51:5: ( 'const' )
+ # C.g:51:7: 'const'
+ self.match("const")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T49
+
+
+
+ # $ANTLR start T50
+ def mT50(self, ):
+
+ try:
+ self.type = T50
+
+ # C.g:52:5: ( 'volatile' )
+ # C.g:52:7: 'volatile'
+ self.match("volatile")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T50
+
+
+
+ # $ANTLR start T51
+ def mT51(self, ):
+
+ try:
+ self.type = T51
+
+ # C.g:53:5: ( 'IN' )
+ # C.g:53:7: 'IN'
+ self.match("IN")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T51
+
+
+
+ # $ANTLR start T52
+ def mT52(self, ):
+
+ try:
+ self.type = T52
+
+ # C.g:54:5: ( 'OUT' )
+ # C.g:54:7: 'OUT'
+ self.match("OUT")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T52
+
+
+
+ # $ANTLR start T53
+ def mT53(self, ):
+
+ try:
+ self.type = T53
+
+ # C.g:55:5: ( 'OPTIONAL' )
+ # C.g:55:7: 'OPTIONAL'
+ self.match("OPTIONAL")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T53
+
+
+
+ # $ANTLR start T54
+ def mT54(self, ):
+
+ try:
+ self.type = T54
+
+ # C.g:56:5: ( 'CONST' )
+ # C.g:56:7: 'CONST'
+ self.match("CONST")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T54
+
+
+
+ # $ANTLR start T55
+ def mT55(self, ):
+
+ try:
+ self.type = T55
+
+ # C.g:57:5: ( 'UNALIGNED' )
+ # C.g:57:7: 'UNALIGNED'
+ self.match("UNALIGNED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T55
+
+
+
+ # $ANTLR start T56
+ def mT56(self, ):
+
+ try:
+ self.type = T56
+
+ # C.g:58:5: ( 'VOLATILE' )
+ # C.g:58:7: 'VOLATILE'
+ self.match("VOLATILE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T56
+
+
+
+ # $ANTLR start T57
+ def mT57(self, ):
+
+ try:
+ self.type = T57
+
+ # C.g:59:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
+ # C.g:59:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
+ self.match("GLOBAL_REMOVE_IF_UNREFERENCED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T57
+
+
+
+ # $ANTLR start T58
+ def mT58(self, ):
+
+ try:
+ self.type = T58
+
+ # C.g:60:5: ( 'EFIAPI' )
+ # C.g:60:7: 'EFIAPI'
+ self.match("EFIAPI")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T58
+
+
+
+ # $ANTLR start T59
+ def mT59(self, ):
+
+ try:
+ self.type = T59
+
+ # C.g:61:5: ( 'EFI_BOOTSERVICE' )
+ # C.g:61:7: 'EFI_BOOTSERVICE'
+ self.match("EFI_BOOTSERVICE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T59
+
+
+
+ # $ANTLR start T60
+ def mT60(self, ):
+
+ try:
+ self.type = T60
+
+ # C.g:62:5: ( 'EFI_RUNTIMESERVICE' )
+ # C.g:62:7: 'EFI_RUNTIMESERVICE'
+ self.match("EFI_RUNTIMESERVICE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T60
+
+
+
+ # $ANTLR start T61
+ def mT61(self, ):
+
+ try:
+ self.type = T61
+
+ # C.g:63:5: ( 'PACKED' )
+ # C.g:63:7: 'PACKED'
+ self.match("PACKED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T61
+
+
+
+ # $ANTLR start T62
+ def mT62(self, ):
+
+ try:
+ self.type = T62
+
+ # C.g:64:5: ( '(' )
+ # C.g:64:7: '('
+ self.match(u'(')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T62
+
+
+
+ # $ANTLR start T63
+ def mT63(self, ):
+
+ try:
+ self.type = T63
+
+ # C.g:65:5: ( ')' )
+ # C.g:65:7: ')'
+ self.match(u')')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T63
+
+
+
+ # $ANTLR start T64
+ def mT64(self, ):
+
+ try:
+ self.type = T64
+
+ # C.g:66:5: ( '[' )
+ # C.g:66:7: '['
+ self.match(u'[')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T64
+
+
+
+ # $ANTLR start T65
+ def mT65(self, ):
+
+ try:
+ self.type = T65
+
+ # C.g:67:5: ( ']' )
+ # C.g:67:7: ']'
+ self.match(u']')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T65
+
+
+
+ # $ANTLR start T66
+ def mT66(self, ):
+
+ try:
+ self.type = T66
+
+ # C.g:68:5: ( '*' )
+ # C.g:68:7: '*'
+ self.match(u'*')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T66
+
+
+
+ # $ANTLR start T67
+ def mT67(self, ):
+
+ try:
+ self.type = T67
+
+ # C.g:69:5: ( '...' )
+ # C.g:69:7: '...'
+ self.match("...")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T67
+
+
+
+ # $ANTLR start T68
+ def mT68(self, ):
+
+ try:
+ self.type = T68
+
+ # C.g:70:5: ( '+' )
+ # C.g:70:7: '+'
+ self.match(u'+')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T68
+
+
+
+ # $ANTLR start T69
+ def mT69(self, ):
+
+ try:
+ self.type = T69
+
+ # C.g:71:5: ( '-' )
+ # C.g:71:7: '-'
+ self.match(u'-')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T69
+
+
+
+ # $ANTLR start T70
+ def mT70(self, ):
+
+ try:
+ self.type = T70
+
+ # C.g:72:5: ( '/' )
+ # C.g:72:7: '/'
+ self.match(u'/')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T70
+
+
+
+ # $ANTLR start T71
+ def mT71(self, ):
+
+ try:
+ self.type = T71
+
+ # C.g:73:5: ( '%' )
+ # C.g:73:7: '%'
+ self.match(u'%')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T71
+
+
+
+ # $ANTLR start T72
+ def mT72(self, ):
+
+ try:
+ self.type = T72
+
+ # C.g:74:5: ( '++' )
+ # C.g:74:7: '++'
+ self.match("++")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T72
+
+
+
+ # $ANTLR start T73
+ def mT73(self, ):
+
+ try:
+ self.type = T73
+
+ # C.g:75:5: ( '--' )
+ # C.g:75:7: '--'
+ self.match("--")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T73
+
+
+
+ # $ANTLR start T74
+ def mT74(self, ):
+
+ try:
+ self.type = T74
+
+ # C.g:76:5: ( 'sizeof' )
+ # C.g:76:7: 'sizeof'
+ self.match("sizeof")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T74
+
+
+
+ # $ANTLR start T75
+ def mT75(self, ):
+
+ try:
+ self.type = T75
+
+ # C.g:77:5: ( '.' )
+ # C.g:77:7: '.'
+ self.match(u'.')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T75
+
+
+
+ # $ANTLR start T76
+ def mT76(self, ):
+
+ try:
+ self.type = T76
+
+ # C.g:78:5: ( '->' )
+ # C.g:78:7: '->'
+ self.match("->")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T76
+
+
+
+ # $ANTLR start T77
+ def mT77(self, ):
+
+ try:
+ self.type = T77
+
+ # C.g:79:5: ( '&' )
+ # C.g:79:7: '&'
+ self.match(u'&')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T77
+
+
+
+ # $ANTLR start T78
+ def mT78(self, ):
+
+ try:
+ self.type = T78
+
+ # C.g:80:5: ( '~' )
+ # C.g:80:7: '~'
+ self.match(u'~')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T78
+
+
+
+ # $ANTLR start T79
+ def mT79(self, ):
+
+ try:
+ self.type = T79
+
+ # C.g:81:5: ( '!' )
+ # C.g:81:7: '!'
+ self.match(u'!')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T79
+
+
+
+ # $ANTLR start T80
+ def mT80(self, ):
+
+ try:
+ self.type = T80
+
+ # C.g:82:5: ( '*=' )
+ # C.g:82:7: '*='
+ self.match("*=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T80
+
+
+
+ # $ANTLR start T81
+ def mT81(self, ):
+
+ try:
+ self.type = T81
+
+ # C.g:83:5: ( '/=' )
+ # C.g:83:7: '/='
+ self.match("/=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T81
+
+
+
+ # $ANTLR start T82
+ def mT82(self, ):
+
+ try:
+ self.type = T82
+
+ # C.g:84:5: ( '%=' )
+ # C.g:84:7: '%='
+ self.match("%=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T82
+
+
+
+ # $ANTLR start T83
+ def mT83(self, ):
+
+ try:
+ self.type = T83
+
+ # C.g:85:5: ( '+=' )
+ # C.g:85:7: '+='
+ self.match("+=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T83
+
+
+
+ # $ANTLR start T84
+ def mT84(self, ):
+
+ try:
+ self.type = T84
+
+ # C.g:86:5: ( '-=' )
+ # C.g:86:7: '-='
+ self.match("-=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T84
+
+
+
+ # $ANTLR start T85
+ def mT85(self, ):
+
+ try:
+ self.type = T85
+
+ # C.g:87:5: ( '<<=' )
+ # C.g:87:7: '<<='
+ self.match("<<=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T85
+
+
+
+ # $ANTLR start T86
+ def mT86(self, ):
+
+ try:
+ self.type = T86
+
+ # C.g:88:5: ( '>>=' )
+ # C.g:88:7: '>>='
+ self.match(">>=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T86
+
+
+
+ # $ANTLR start T87
+ def mT87(self, ):
+
+ try:
+ self.type = T87
+
+ # C.g:89:5: ( '&=' )
+ # C.g:89:7: '&='
+ self.match("&=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T87
+
+
+
+ # $ANTLR start T88
+ def mT88(self, ):
+
+ try:
+ self.type = T88
+
+ # C.g:90:5: ( '^=' )
+ # C.g:90:7: '^='
+ self.match("^=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T88
+
+
+
+ # $ANTLR start T89
+ def mT89(self, ):
+
+ try:
+ self.type = T89
+
+ # C.g:91:5: ( '|=' )
+ # C.g:91:7: '|='
+ self.match("|=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T89
+
+
+
+ # $ANTLR start T90
+ def mT90(self, ):
+
+ try:
+ self.type = T90
+
+ # C.g:92:5: ( '?' )
+ # C.g:92:7: '?'
+ self.match(u'?')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T90
+
+
+
+ # $ANTLR start T91
+ def mT91(self, ):
+
+ try:
+ self.type = T91
+
+ # C.g:93:5: ( '||' )
+ # C.g:93:7: '||'
+ self.match("||")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T91
+
+
+
+ # $ANTLR start T92
+ def mT92(self, ):
+
+ try:
+ self.type = T92
+
+ # C.g:94:5: ( '&&' )
+ # C.g:94:7: '&&'
+ self.match("&&")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T92
+
+
+
+ # $ANTLR start T93
+ def mT93(self, ):
+
+ try:
+ self.type = T93
+
+ # C.g:95:5: ( '|' )
+ # C.g:95:7: '|'
+ self.match(u'|')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T93
+
+
+
+ # $ANTLR start T94
+ def mT94(self, ):
+
+ try:
+ self.type = T94
+
+ # C.g:96:5: ( '^' )
+ # C.g:96:7: '^'
+ self.match(u'^')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T94
+
+
+
+ # $ANTLR start T95
+ def mT95(self, ):
+
+ try:
+ self.type = T95
+
+ # C.g:97:5: ( '==' )
+ # C.g:97:7: '=='
+ self.match("==")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T95
+
+
+
+ # $ANTLR start T96
+ def mT96(self, ):
+
+ try:
+ self.type = T96
+
+ # C.g:98:5: ( '!=' )
+ # C.g:98:7: '!='
+ self.match("!=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T96
+
+
+
+ # $ANTLR start T97
+ def mT97(self, ):
+
+ try:
+ self.type = T97
+
+ # C.g:99:5: ( '<' )
+ # C.g:99:7: '<'
+ self.match(u'<')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T97
+
+
+
+ # $ANTLR start T98
+ def mT98(self, ):
+
+ try:
+ self.type = T98
+
+ # C.g:100:5: ( '>' )
+ # C.g:100:7: '>'
+ self.match(u'>')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T98
+
+
+
+ # $ANTLR start T99
+ def mT99(self, ):
+
+ try:
+ self.type = T99
+
+ # C.g:101:5: ( '<=' )
+ # C.g:101:7: '<='
+ self.match("<=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T99
+
+
+
+ # $ANTLR start T100
+ def mT100(self, ):
+
+ try:
+ self.type = T100
+
+ # C.g:102:6: ( '>=' )
+ # C.g:102:8: '>='
+ self.match(">=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T100
+
+
+
+ # $ANTLR start T101
+ def mT101(self, ):
+
+ try:
+ self.type = T101
+
+ # C.g:103:6: ( '<<' )
+ # C.g:103:8: '<<'
+ self.match("<<")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T101
+
+
+
+ # $ANTLR start T102
+ def mT102(self, ):
+
+ try:
+ self.type = T102
+
+ # C.g:104:6: ( '>>' )
+ # C.g:104:8: '>>'
+ self.match(">>")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T102
+
+
+
+ # $ANTLR start T103
+ def mT103(self, ):
+
+ try:
+ self.type = T103
+
+ # C.g:105:6: ( '__asm__' )
+ # C.g:105:8: '__asm__'
+ self.match("__asm__")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T103
+
+
+
+ # $ANTLR start T104
+ def mT104(self, ):
+
+ try:
+ self.type = T104
+
+ # C.g:106:6: ( '_asm' )
+ # C.g:106:8: '_asm'
+ self.match("_asm")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T104
+
+
+
+ # $ANTLR start T105
+ def mT105(self, ):
+
+ try:
+ self.type = T105
+
+ # C.g:107:6: ( '__asm' )
+ # C.g:107:8: '__asm'
+ self.match("__asm")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T105
+
+
+
+ # $ANTLR start T106
+ def mT106(self, ):
+
+ try:
+ self.type = T106
+
+ # C.g:108:6: ( 'case' )
+ # C.g:108:8: 'case'
+ self.match("case")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T106
+
+
+
+ # $ANTLR start T107
+ def mT107(self, ):
+
+ try:
+ self.type = T107
+
+ # C.g:109:6: ( 'default' )
+ # C.g:109:8: 'default'
+ self.match("default")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T107
+
+
+
+ # $ANTLR start T108
+ def mT108(self, ):
+
+ try:
+ self.type = T108
+
+ # C.g:110:6: ( 'if' )
+ # C.g:110:8: 'if'
+ self.match("if")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T108
+
+
+
+ # $ANTLR start T109
+ def mT109(self, ):
+
+ try:
+ self.type = T109
+
+ # C.g:111:6: ( 'else' )
+ # C.g:111:8: 'else'
+ self.match("else")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T109
+
+
+
+ # $ANTLR start T110
+ def mT110(self, ):
+
+ try:
+ self.type = T110
+
+ # C.g:112:6: ( 'switch' )
+ # C.g:112:8: 'switch'
+ self.match("switch")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T110
+
+
+
+ # $ANTLR start T111
+ def mT111(self, ):
+
+ try:
+ self.type = T111
+
+ # C.g:113:6: ( 'while' )
+ # C.g:113:8: 'while'
+ self.match("while")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T111
+
+
+
+ # $ANTLR start T112
+ def mT112(self, ):
+
+ try:
+ self.type = T112
+
+ # C.g:114:6: ( 'do' )
+ # C.g:114:8: 'do'
+ self.match("do")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T112
+
+
+
+ # $ANTLR start T113
+ def mT113(self, ):
+
+ try:
+ self.type = T113
+
+ # C.g:115:6: ( 'for' )
+ # C.g:115:8: 'for'
+ self.match("for")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T113
+
+
+
+ # $ANTLR start T114
+ def mT114(self, ):
+
+ try:
+ self.type = T114
+
+ # C.g:116:6: ( 'goto' )
+ # C.g:116:8: 'goto'
+ self.match("goto")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T114
+
+
+
+ # $ANTLR start T115
+ def mT115(self, ):
+
+ try:
+ self.type = T115
+
+ # C.g:117:6: ( 'continue' )
+ # C.g:117:8: 'continue'
+ self.match("continue")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T115
+
+
+
+ # $ANTLR start T116
+ def mT116(self, ):
+
+ try:
+ self.type = T116
+
+ # C.g:118:6: ( 'break' )
+ # C.g:118:8: 'break'
+ self.match("break")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T116
+
+
+
+ # $ANTLR start T117
+ def mT117(self, ):
+
+ try:
+ self.type = T117
+
+ # C.g:119:6: ( 'return' )
+ # C.g:119:8: 'return'
+ self.match("return")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T117
+
+
+
+ # $ANTLR start IDENTIFIER
+ def mIDENTIFIER(self, ):
+
+ try:
+ self.type = IDENTIFIER
+
+ # C.g:586:2: ( LETTER ( LETTER | '0' .. '9' )* )
+ # C.g:586:4: LETTER ( LETTER | '0' .. '9' )*
+ self.mLETTER()
+
+ # C.g:586:11: ( LETTER | '0' .. '9' )*
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == u'$' or (u'0' <= LA1_0 <= u'9') or (u'A' <= LA1_0 <= u'Z') or LA1_0 == u'_' or (u'a' <= LA1_0 <= u'z')) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # C.g:
+ if self.input.LA(1) == u'$' or (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop1
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end IDENTIFIER
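+
+    # IDENTIFIER is LETTER ( LETTER | '0'..'9' )*, and LETTER also accepts
+    # '$' and '_', so names such as __asm__ or $reg lex as one IDENTIFIER
+    # unless a more specific keyword rule wins during dfa35 prediction.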
+
+
+
+ # $ANTLR start LETTER
+ def mLETTER(self, ):
+
+ try:
+ # C.g:591:2: ( '$' | 'A' .. 'Z' | 'a' .. 'z' | '_' )
+ # C.g:
+ if self.input.LA(1) == u'$' or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end LETTER
+
+
+
+ # $ANTLR start CHARACTER_LITERAL
+ def mCHARACTER_LITERAL(self, ):
+
+ try:
+ self.type = CHARACTER_LITERAL
+
+ # C.g:598:5: ( ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\'' )
+ # C.g:598:9: ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\''
+ # C.g:598:9: ( 'L' )?
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == u'L') :
+ alt2 = 1
+ if alt2 == 1:
+ # C.g:598:10: 'L'
+ self.match(u'L')
+
+
+
+
+ self.match(u'\'')
+
+ # C.g:598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == u'\\') :
+ alt3 = 1
+ elif ((u'\u0000' <= LA3_0 <= u'&') or (u'(' <= LA3_0 <= u'[') or (u']' <= LA3_0 <= u'\uFFFE')) :
+ alt3 = 2
+ else:
+ nvae = NoViableAltException("598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )", 3, 0, self.input)
+
+ raise nvae
+
+ if alt3 == 1:
+ # C.g:598:23: EscapeSequence
+ self.mEscapeSequence()
+
+
+
+ elif alt3 == 2:
+ # C.g:598:40: ~ ( '\\'' | '\\\\' )
+ if (u'\u0000' <= self.input.LA(1) <= u'&') or (u'(' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+ self.match(u'\'')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end CHARACTER_LITERAL
+
+
+
+ # $ANTLR start STRING_LITERAL
+ def mSTRING_LITERAL(self, ):
+
+ try:
+ self.type = STRING_LITERAL
+
+ # C.g:602:5: ( ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"' )
+ # C.g:602:8: ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"'
+ # C.g:602:8: ( 'L' )?
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == u'L') :
+ alt4 = 1
+ if alt4 == 1:
+ # C.g:602:9: 'L'
+ self.match(u'L')
+
+
+
+
+ self.match(u'"')
+
+ # C.g:602:19: ( EscapeSequence | ~ ( '\\\\' | '\"' ) )*
+ while True: #loop5
+ alt5 = 3
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == u'\\') :
+ alt5 = 1
+ elif ((u'\u0000' <= LA5_0 <= u'!') or (u'#' <= LA5_0 <= u'[') or (u']' <= LA5_0 <= u'\uFFFE')) :
+ alt5 = 2
+
+
+ if alt5 == 1:
+ # C.g:602:21: EscapeSequence
+ self.mEscapeSequence()
+
+
+
+ elif alt5 == 2:
+ # C.g:602:38: ~ ( '\\\\' | '\"' )
+ if (u'\u0000' <= self.input.LA(1) <= u'!') or (u'#' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop5
+
+
+ self.match(u'"')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end STRING_LITERAL
+
+
+
+ # $ANTLR start HEX_LITERAL
+ def mHEX_LITERAL(self, ):
+
+ try:
+ self.type = HEX_LITERAL
+
+ # C.g:605:13: ( '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )? )
+ # C.g:605:15: '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )?
+ self.match(u'0')
+
+ if self.input.LA(1) == u'X' or self.input.LA(1) == u'x':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ # C.g:605:29: ( HexDigit )+
+ cnt6 = 0
+ while True: #loop6
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if ((u'0' <= LA6_0 <= u'9') or (u'A' <= LA6_0 <= u'F') or (u'a' <= LA6_0 <= u'f')) :
+ alt6 = 1
+
+
+ if alt6 == 1:
+ # C.g:605:29: HexDigit
+ self.mHexDigit()
+
+
+
+ else:
+ if cnt6 >= 1:
+ break #loop6
+
+ eee = EarlyExitException(6, self.input)
+ raise eee
+
+ cnt6 += 1
+
+
+ # C.g:605:39: ( IntegerTypeSuffix )?
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if (LA7_0 == u'L' or LA7_0 == u'U' or LA7_0 == u'l' or LA7_0 == u'u') :
+ alt7 = 1
+ if alt7 == 1:
+ # C.g:605:39: IntegerTypeSuffix
+ self.mIntegerTypeSuffix()
+
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end HEX_LITERAL
+
+
+
+ # $ANTLR start DECIMAL_LITERAL
+ def mDECIMAL_LITERAL(self, ):
+
+ try:
+ self.type = DECIMAL_LITERAL
+
+ # C.g:607:17: ( ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )? )
+ # C.g:607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )?
+ # C.g:607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == u'0') :
+ alt9 = 1
+ elif ((u'1' <= LA9_0 <= u'9')) :
+ alt9 = 2
+ else:
+ nvae = NoViableAltException("607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )", 9, 0, self.input)
+
+ raise nvae
+
+ if alt9 == 1:
+ # C.g:607:20: '0'
+ self.match(u'0')
+
+
+
+ elif alt9 == 2:
+ # C.g:607:26: '1' .. '9' ( '0' .. '9' )*
+ self.matchRange(u'1', u'9')
+
+ # C.g:607:35: ( '0' .. '9' )*
+ while True: #loop8
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if ((u'0' <= LA8_0 <= u'9')) :
+ alt8 = 1
+
+
+ if alt8 == 1:
+ # C.g:607:35: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ break #loop8
+
+
+
+
+
+ # C.g:607:46: ( IntegerTypeSuffix )?
+ alt10 = 2
+ LA10_0 = self.input.LA(1)
+
+ if (LA10_0 == u'L' or LA10_0 == u'U' or LA10_0 == u'l' or LA10_0 == u'u') :
+ alt10 = 1
+ if alt10 == 1:
+ # C.g:607:46: IntegerTypeSuffix
+ self.mIntegerTypeSuffix()
+
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end DECIMAL_LITERAL
+
+
+
+ # $ANTLR start OCTAL_LITERAL
+ def mOCTAL_LITERAL(self, ):
+
+ try:
+ self.type = OCTAL_LITERAL
+
+ # C.g:609:15: ( '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? )
+ # C.g:609:17: '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )?
+ self.match(u'0')
+
+ # C.g:609:21: ( '0' .. '7' )+
+ cnt11 = 0
+ while True: #loop11
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if ((u'0' <= LA11_0 <= u'7')) :
+ alt11 = 1
+
+
+ if alt11 == 1:
+ # C.g:609:22: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+ else:
+ if cnt11 >= 1:
+ break #loop11
+
+ eee = EarlyExitException(11, self.input)
+ raise eee
+
+ cnt11 += 1
+
+
+ # C.g:609:33: ( IntegerTypeSuffix )?
+ alt12 = 2
+ LA12_0 = self.input.LA(1)
+
+ if (LA12_0 == u'L' or LA12_0 == u'U' or LA12_0 == u'l' or LA12_0 == u'u') :
+ alt12 = 1
+ if alt12 == 1:
+ # C.g:609:33: IntegerTypeSuffix
+ self.mIntegerTypeSuffix()
+
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end OCTAL_LITERAL
+
+
+
+ # $ANTLR start HexDigit
+ def mHexDigit(self, ):
+
+ try:
+ # C.g:612:10: ( ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) )
+ # C.g:612:12: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )
+ if (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'F') or (u'a' <= self.input.LA(1) <= u'f'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end HexDigit
+
+
+
+ # $ANTLR start IntegerTypeSuffix
+ def mIntegerTypeSuffix(self, ):
+
+ try:
+ # C.g:616:2: ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) )
+ alt13 = 4
+ LA13_0 = self.input.LA(1)
+
+ if (LA13_0 == u'U' or LA13_0 == u'u') :
+ LA13_1 = self.input.LA(2)
+
+ if (LA13_1 == u'L' or LA13_1 == u'l') :
+ LA13_3 = self.input.LA(3)
+
+ if (LA13_3 == u'L' or LA13_3 == u'l') :
+ alt13 = 4
+ else:
+ alt13 = 3
+ else:
+ alt13 = 1
+ elif (LA13_0 == u'L' or LA13_0 == u'l') :
+ alt13 = 2
+ else:
+ nvae = NoViableAltException("614:1: fragment IntegerTypeSuffix : ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) );", 13, 0, self.input)
+
+ raise nvae
+
+ if alt13 == 1:
+ # C.g:616:4: ( 'u' | 'U' )
+ if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt13 == 2:
+ # C.g:617:4: ( 'l' | 'L' )
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt13 == 3:
+ # C.g:618:4: ( 'u' | 'U' ) ( 'l' | 'L' )
+ if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt13 == 4:
+ # C.g:619:4: ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' )
+ if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end IntegerTypeSuffix
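+
+    # HexDigit and IntegerTypeSuffix are fragment rules: they never set
+    # self.type and never emit tokens of their own; they are only called
+    # from other lexer rules such as HEX_LITERAL, DECIMAL_LITERAL and
+    # OCTAL_LITERAL above.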
+
+
+
+ # $ANTLR start FLOATING_POINT_LITERAL
+ def mFLOATING_POINT_LITERAL(self, ):
+
+ try:
+ self.type = FLOATING_POINT_LITERAL
+
+ # C.g:623:5: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )? | '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )? | ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )? | ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix )
+ alt25 = 4
+ alt25 = self.dfa25.predict(self.input)
+ if alt25 == 1:
+ # C.g:623:9: ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )?
+ # C.g:623:9: ( '0' .. '9' )+
+ cnt14 = 0
+ while True: #loop14
+ alt14 = 2
+ LA14_0 = self.input.LA(1)
+
+ if ((u'0' <= LA14_0 <= u'9')) :
+ alt14 = 1
+
+
+ if alt14 == 1:
+ # C.g:623:10: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt14 >= 1:
+ break #loop14
+
+ eee = EarlyExitException(14, self.input)
+ raise eee
+
+ cnt14 += 1
+
+
+ self.match(u'.')
+
+ # C.g:623:25: ( '0' .. '9' )*
+ while True: #loop15
+ alt15 = 2
+ LA15_0 = self.input.LA(1)
+
+ if ((u'0' <= LA15_0 <= u'9')) :
+ alt15 = 1
+
+
+ if alt15 == 1:
+ # C.g:623:26: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ break #loop15
+
+
+ # C.g:623:37: ( Exponent )?
+ alt16 = 2
+ LA16_0 = self.input.LA(1)
+
+ if (LA16_0 == u'E' or LA16_0 == u'e') :
+ alt16 = 1
+ if alt16 == 1:
+ # C.g:623:37: Exponent
+ self.mExponent()
+
+
+
+
+ # C.g:623:47: ( FloatTypeSuffix )?
+ alt17 = 2
+ LA17_0 = self.input.LA(1)
+
+ if (LA17_0 == u'D' or LA17_0 == u'F' or LA17_0 == u'd' or LA17_0 == u'f') :
+ alt17 = 1
+ if alt17 == 1:
+ # C.g:623:47: FloatTypeSuffix
+ self.mFloatTypeSuffix()
+
+
+
+
+
+
+ elif alt25 == 2:
+ # C.g:624:9: '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )?
+ self.match(u'.')
+
+ # C.g:624:13: ( '0' .. '9' )+
+ cnt18 = 0
+ while True: #loop18
+ alt18 = 2
+ LA18_0 = self.input.LA(1)
+
+ if ((u'0' <= LA18_0 <= u'9')) :
+ alt18 = 1
+
+
+ if alt18 == 1:
+ # C.g:624:14: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt18 >= 1:
+ break #loop18
+
+ eee = EarlyExitException(18, self.input)
+ raise eee
+
+ cnt18 += 1
+
+
+ # C.g:624:25: ( Exponent )?
+ alt19 = 2
+ LA19_0 = self.input.LA(1)
+
+ if (LA19_0 == u'E' or LA19_0 == u'e') :
+ alt19 = 1
+ if alt19 == 1:
+ # C.g:624:25: Exponent
+ self.mExponent()
+
+
+
+
+ # C.g:624:35: ( FloatTypeSuffix )?
+ alt20 = 2
+ LA20_0 = self.input.LA(1)
+
+ if (LA20_0 == u'D' or LA20_0 == u'F' or LA20_0 == u'd' or LA20_0 == u'f') :
+ alt20 = 1
+ if alt20 == 1:
+ # C.g:624:35: FloatTypeSuffix
+ self.mFloatTypeSuffix()
+
+
+
+
+
+
+ elif alt25 == 3:
+ # C.g:625:9: ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )?
+ # C.g:625:9: ( '0' .. '9' )+
+ cnt21 = 0
+ while True: #loop21
+ alt21 = 2
+ LA21_0 = self.input.LA(1)
+
+ if ((u'0' <= LA21_0 <= u'9')) :
+ alt21 = 1
+
+
+ if alt21 == 1:
+ # C.g:625:10: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt21 >= 1:
+ break #loop21
+
+ eee = EarlyExitException(21, self.input)
+ raise eee
+
+ cnt21 += 1
+
+
+ self.mExponent()
+
+ # C.g:625:30: ( FloatTypeSuffix )?
+ alt22 = 2
+ LA22_0 = self.input.LA(1)
+
+ if (LA22_0 == u'D' or LA22_0 == u'F' or LA22_0 == u'd' or LA22_0 == u'f') :
+ alt22 = 1
+ if alt22 == 1:
+ # C.g:625:30: FloatTypeSuffix
+ self.mFloatTypeSuffix()
+
+
+
+
+
+
+ elif alt25 == 4:
+ # C.g:626:9: ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix
+ # C.g:626:9: ( '0' .. '9' )+
+ cnt23 = 0
+ while True: #loop23
+ alt23 = 2
+ LA23_0 = self.input.LA(1)
+
+ if ((u'0' <= LA23_0 <= u'9')) :
+ alt23 = 1
+
+
+ if alt23 == 1:
+ # C.g:626:10: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt23 >= 1:
+ break #loop23
+
+ eee = EarlyExitException(23, self.input)
+ raise eee
+
+ cnt23 += 1
+
+
+ # C.g:626:21: ( Exponent )?
+ alt24 = 2
+ LA24_0 = self.input.LA(1)
+
+ if (LA24_0 == u'E' or LA24_0 == u'e') :
+ alt24 = 1
+ if alt24 == 1:
+ # C.g:626:21: Exponent
+ self.mExponent()
+
+
+
+
+ self.mFloatTypeSuffix()
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end FLOATING_POINT_LITERAL
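+
+    # The four FLOATING_POINT_LITERAL alternatives ('1.5', '.5', '1e3',
+    # '1f') overlap on their leading digits, so the choice is delegated to
+    # dfa25.predict() rather than fixed lookahead; see the DFA25_* tables
+    # at the end of the class.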
+
+
+
+ # $ANTLR start Exponent
+ def mExponent(self, ):
+
+ try:
+ # C.g:630:10: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )
+ # C.g:630:12: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+
+ if self.input.LA(1) == u'E' or self.input.LA(1) == u'e':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ # C.g:630:22: ( '+' | '-' )?
+ alt26 = 2
+ LA26_0 = self.input.LA(1)
+
+ if (LA26_0 == u'+' or LA26_0 == u'-') :
+ alt26 = 1
+ if alt26 == 1:
+ # C.g:
+ if self.input.LA(1) == u'+' or self.input.LA(1) == u'-':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+ # C.g:630:33: ( '0' .. '9' )+
+ cnt27 = 0
+ while True: #loop27
+ alt27 = 2
+ LA27_0 = self.input.LA(1)
+
+ if ((u'0' <= LA27_0 <= u'9')) :
+ alt27 = 1
+
+
+ if alt27 == 1:
+ # C.g:630:34: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt27 >= 1:
+ break #loop27
+
+ eee = EarlyExitException(27, self.input)
+ raise eee
+
+ cnt27 += 1
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end Exponent
+
+
+
+ # $ANTLR start FloatTypeSuffix
+ def mFloatTypeSuffix(self, ):
+
+ try:
+ # C.g:633:17: ( ( 'f' | 'F' | 'd' | 'D' ) )
+ # C.g:633:19: ( 'f' | 'F' | 'd' | 'D' )
+ if self.input.LA(1) == u'D' or self.input.LA(1) == u'F' or self.input.LA(1) == u'd' or self.input.LA(1) == u'f':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end FloatTypeSuffix
+
+
+
+ # $ANTLR start EscapeSequence
+ def mEscapeSequence(self, ):
+
+ try:
+ # C.g:637:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape )
+ alt28 = 2
+ LA28_0 = self.input.LA(1)
+
+ if (LA28_0 == u'\\') :
+ LA28_1 = self.input.LA(2)
+
+ if (LA28_1 == u'"' or LA28_1 == u'\'' or LA28_1 == u'\\' or LA28_1 == u'b' or LA28_1 == u'f' or LA28_1 == u'n' or LA28_1 == u'r' or LA28_1 == u't') :
+ alt28 = 1
+ elif ((u'0' <= LA28_1 <= u'7')) :
+ alt28 = 2
+ else:
+ nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 1, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 0, self.input)
+
+ raise nvae
+
+ if alt28 == 1:
+ # C.g:637:8: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' )
+ self.match(u'\\')
+
+ if self.input.LA(1) == u'"' or self.input.LA(1) == u'\'' or self.input.LA(1) == u'\\' or self.input.LA(1) == u'b' or self.input.LA(1) == u'f' or self.input.LA(1) == u'n' or self.input.LA(1) == u'r' or self.input.LA(1) == u't':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt28 == 2:
+ # C.g:638:9: OctalEscape
+ self.mOctalEscape()
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end EscapeSequence
+
+
+
+ # $ANTLR start OctalEscape
+ def mOctalEscape(self, ):
+
+ try:
+ # C.g:643:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) )
+ alt29 = 3
+ LA29_0 = self.input.LA(1)
+
+ if (LA29_0 == u'\\') :
+ LA29_1 = self.input.LA(2)
+
+ if ((u'0' <= LA29_1 <= u'3')) :
+ LA29_2 = self.input.LA(3)
+
+ if ((u'0' <= LA29_2 <= u'7')) :
+ LA29_4 = self.input.LA(4)
+
+ if ((u'0' <= LA29_4 <= u'7')) :
+ alt29 = 1
+ else:
+ alt29 = 2
+ else:
+ alt29 = 3
+ elif ((u'4' <= LA29_1 <= u'7')) :
+ LA29_3 = self.input.LA(3)
+
+ if ((u'0' <= LA29_3 <= u'7')) :
+ alt29 = 2
+ else:
+ alt29 = 3
+ else:
+ nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 1, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 0, self.input)
+
+ raise nvae
+
+ if alt29 == 1:
+ # C.g:643:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )
+ self.match(u'\\')
+
+ # C.g:643:14: ( '0' .. '3' )
+ # C.g:643:15: '0' .. '3'
+ self.matchRange(u'0', u'3')
+
+
+
+
+ # C.g:643:25: ( '0' .. '7' )
+ # C.g:643:26: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+ # C.g:643:36: ( '0' .. '7' )
+ # C.g:643:37: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+
+
+ elif alt29 == 2:
+ # C.g:644:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' )
+ self.match(u'\\')
+
+ # C.g:644:14: ( '0' .. '7' )
+ # C.g:644:15: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+ # C.g:644:25: ( '0' .. '7' )
+ # C.g:644:26: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+
+
+ elif alt29 == 3:
+ # C.g:645:9: '\\\\' ( '0' .. '7' )
+ self.match(u'\\')
+
+ # C.g:645:14: ( '0' .. '7' )
+ # C.g:645:15: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end OctalEscape
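+
+    # EscapeSequence handles the simple C escapes plus one- to three-digit
+    # octal escapes (longest match first). UnicodeEscape below is generated
+    # as a fragment but is not referenced by any other rule in this file,
+    # so it appears to be dead code here.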
+
+
+
+ # $ANTLR start UnicodeEscape
+ def mUnicodeEscape(self, ):
+
+ try:
+ # C.g:650:5: ( '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit )
+ # C.g:650:9: '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit
+ self.match(u'\\')
+
+ self.match(u'u')
+
+ self.mHexDigit()
+
+ self.mHexDigit()
+
+ self.mHexDigit()
+
+ self.mHexDigit()
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end UnicodeEscape
+
+
+
+ # $ANTLR start WS
+ def mWS(self, ):
+
+ try:
+ self.type = WS
+
+ # C.g:653:5: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
+ # C.g:653:8: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
+ if (u'\t' <= self.input.LA(1) <= u'\n') or (u'\f' <= self.input.LA(1) <= u'\r') or self.input.LA(1) == u' ':
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ #action start
+ self.channel=HIDDEN;
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end WS
+
+
+
+ # $ANTLR start BS
+ def mBS(self, ):
+
+ try:
+ self.type = BS
+
+ # C.g:657:5: ( ( '\\\\' ) )
+ # C.g:657:7: ( '\\\\' )
+ # C.g:657:7: ( '\\\\' )
+ # C.g:657:8: '\\\\'
+ self.match(u'\\')
+
+
+
+
+ #action start
+ self.channel=HIDDEN;
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end BS
+
+
+
+ # $ANTLR start UnicodeVocabulary
+ def mUnicodeVocabulary(self, ):
+
+ try:
+ self.type = UnicodeVocabulary
+
+ # C.g:665:5: ( '\\u0003' .. '\\uFFFE' )
+ # C.g:665:7: '\\u0003' .. '\\uFFFE'
+ self.matchRange(u'\u0003', u'\uFFFE')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end UnicodeVocabulary
+
+
+
+ # $ANTLR start COMMENT
+ def mCOMMENT(self, ):
+
+ try:
+ self.type = COMMENT
+
+ # C.g:668:5: ( '/*' ( options {greedy=false; } : . )* '*/' )
+ # C.g:668:9: '/*' ( options {greedy=false; } : . )* '*/'
+ self.match("/*")
+
+
+ # C.g:668:14: ( options {greedy=false; } : . )*
+ while True: #loop30
+ alt30 = 2
+ LA30_0 = self.input.LA(1)
+
+ if (LA30_0 == u'*') :
+ LA30_1 = self.input.LA(2)
+
+ if (LA30_1 == u'/') :
+ alt30 = 2
+ elif ((u'\u0000' <= LA30_1 <= u'.') or (u'0' <= LA30_1 <= u'\uFFFE')) :
+ alt30 = 1
+
+
+ elif ((u'\u0000' <= LA30_0 <= u')') or (u'+' <= LA30_0 <= u'\uFFFE')) :
+ alt30 = 1
+
+
+ if alt30 == 1:
+ # C.g:668:42: .
+ self.matchAny()
+
+
+
+ else:
+ break #loop30
+
+
+ self.match("*/")
+
+
+ #action start
+ self.channel=HIDDEN;
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end COMMENT
+
+
+
+ # $ANTLR start LINE_COMMENT
+ def mLINE_COMMENT(self, ):
+
+ try:
+ self.type = LINE_COMMENT
+
+ # C.g:673:5: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # C.g:673:7: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ self.match("//")
+
+
+ # C.g:673:12: (~ ( '\\n' | '\\r' ) )*
+ while True: #loop31
+ alt31 = 2
+ LA31_0 = self.input.LA(1)
+
+ if ((u'\u0000' <= LA31_0 <= u'\t') or (u'\u000B' <= LA31_0 <= u'\f') or (u'\u000E' <= LA31_0 <= u'\uFFFE')) :
+ alt31 = 1
+
+
+ if alt31 == 1:
+ # C.g:673:12: ~ ( '\\n' | '\\r' )
+ if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop31
+
+
+ # C.g:673:26: ( '\\r' )?
+ alt32 = 2
+ LA32_0 = self.input.LA(1)
+
+ if (LA32_0 == u'\r') :
+ alt32 = 1
+ if alt32 == 1:
+ # C.g:673:26: '\\r'
+ self.match(u'\r')
+
+
+
+
+ self.match(u'\n')
+
+ #action start
+ self.channel=HIDDEN;
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end LINE_COMMENT
+
+
+
+ # $ANTLR start LINE_COMMAND
+ def mLINE_COMMAND(self, ):
+
+ try:
+ self.type = LINE_COMMAND
+
+ # C.g:678:5: ( '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # C.g:678:7: '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ self.match(u'#')
+
+ # C.g:678:11: (~ ( '\\n' | '\\r' ) )*
+ while True: #loop33
+ alt33 = 2
+ LA33_0 = self.input.LA(1)
+
+ if ((u'\u0000' <= LA33_0 <= u'\t') or (u'\u000B' <= LA33_0 <= u'\f') or (u'\u000E' <= LA33_0 <= u'\uFFFE')) :
+ alt33 = 1
+
+
+ if alt33 == 1:
+ # C.g:678:11: ~ ( '\\n' | '\\r' )
+ if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop33
+
+
+ # C.g:678:25: ( '\\r' )?
+ alt34 = 2
+ LA34_0 = self.input.LA(1)
+
+ if (LA34_0 == u'\r') :
+ alt34 = 1
+ if alt34 == 1:
+ # C.g:678:25: '\\r'
+ self.match(u'\r')
+
+
+
+
+ self.match(u'\n')
+
+ #action start
+ self.channel=HIDDEN;
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end LINE_COMMAND
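+
+    # WS, BS, COMMENT, LINE_COMMENT and LINE_COMMAND all set
+    # self.channel = HIDDEN in their embedded actions, so they are dropped
+    # from the parser's default token stream while remaining visible to
+    # tools that walk the hidden channel.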
+
+
+
+ def mTokens(self):
+ # C.g:1:8: ( T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | T33 | T34 | T35 | T36 | T37 | T38 | T39 | T40 | T41 | T42 | T43 | T44 | T45 | T46 | T47 | T48 | T49 | T50 | T51 | T52 | T53 | T54 | T55 | T56 | T57 | T58 | T59 | T60 | T61 | T62 | T63 | T64 | T65 | T66 | T67 | T68 | T69 | T70 | T71 | T72 | T73 | T74 | T75 | T76 | T77 | T78 | T79 | T80 | T81 | T82 | T83 | T84 | T85 | T86 | T87 | T88 | T89 | T90 | T91 | T92 | T93 | T94 | T95 | T96 | T97 | T98 | T99 | T100 | T101 | T102 | T103 | T104 | T105 | T106 | T107 | T108 | T109 | T110 | T111 | T112 | T113 | T114 | T115 | T116 | T117 | IDENTIFIER | CHARACTER_LITERAL | STRING_LITERAL | HEX_LITERAL | DECIMAL_LITERAL | OCTAL_LITERAL | FLOATING_POINT_LITERAL | WS | BS | UnicodeVocabulary | COMMENT | LINE_COMMENT | LINE_COMMAND )
+ alt35 = 106
+ alt35 = self.dfa35.predict(self.input)
+ if alt35 == 1:
+ # C.g:1:10: T25
+ self.mT25()
+
+
+
+ elif alt35 == 2:
+ # C.g:1:14: T26
+ self.mT26()
+
+
+
+ elif alt35 == 3:
+ # C.g:1:18: T27
+ self.mT27()
+
+
+
+ elif alt35 == 4:
+ # C.g:1:22: T28
+ self.mT28()
+
+
+
+ elif alt35 == 5:
+ # C.g:1:26: T29
+ self.mT29()
+
+
+
+ elif alt35 == 6:
+ # C.g:1:30: T30
+ self.mT30()
+
+
+
+ elif alt35 == 7:
+ # C.g:1:34: T31
+ self.mT31()
+
+
+
+ elif alt35 == 8:
+ # C.g:1:38: T32
+ self.mT32()
+
+
+
+ elif alt35 == 9:
+ # C.g:1:42: T33
+ self.mT33()
+
+
+
+ elif alt35 == 10:
+ # C.g:1:46: T34
+ self.mT34()
+
+
+
+ elif alt35 == 11:
+ # C.g:1:50: T35
+ self.mT35()
+
+
+
+ elif alt35 == 12:
+ # C.g:1:54: T36
+ self.mT36()
+
+
+
+ elif alt35 == 13:
+ # C.g:1:58: T37
+ self.mT37()
+
+
+
+ elif alt35 == 14:
+ # C.g:1:62: T38
+ self.mT38()
+
+
+
+ elif alt35 == 15:
+ # C.g:1:66: T39
+ self.mT39()
+
+
+
+ elif alt35 == 16:
+ # C.g:1:70: T40
+ self.mT40()
+
+
+
+ elif alt35 == 17:
+ # C.g:1:74: T41
+ self.mT41()
+
+
+
+ elif alt35 == 18:
+ # C.g:1:78: T42
+ self.mT42()
+
+
+
+ elif alt35 == 19:
+ # C.g:1:82: T43
+ self.mT43()
+
+
+
+ elif alt35 == 20:
+ # C.g:1:86: T44
+ self.mT44()
+
+
+
+ elif alt35 == 21:
+ # C.g:1:90: T45
+ self.mT45()
+
+
+
+ elif alt35 == 22:
+ # C.g:1:94: T46
+ self.mT46()
+
+
+
+ elif alt35 == 23:
+ # C.g:1:98: T47
+ self.mT47()
+
+
+
+ elif alt35 == 24:
+ # C.g:1:102: T48
+ self.mT48()
+
+
+
+ elif alt35 == 25:
+ # C.g:1:106: T49
+ self.mT49()
+
+
+
+ elif alt35 == 26:
+ # C.g:1:110: T50
+ self.mT50()
+
+
+
+ elif alt35 == 27:
+ # C.g:1:114: T51
+ self.mT51()
+
+
+
+ elif alt35 == 28:
+ # C.g:1:118: T52
+ self.mT52()
+
+
+
+ elif alt35 == 29:
+ # C.g:1:122: T53
+ self.mT53()
+
+
+
+ elif alt35 == 30:
+ # C.g:1:126: T54
+ self.mT54()
+
+
+
+ elif alt35 == 31:
+ # C.g:1:130: T55
+ self.mT55()
+
+
+
+ elif alt35 == 32:
+ # C.g:1:134: T56
+ self.mT56()
+
+
+
+ elif alt35 == 33:
+ # C.g:1:138: T57
+ self.mT57()
+
+
+
+ elif alt35 == 34:
+ # C.g:1:142: T58
+ self.mT58()
+
+
+
+ elif alt35 == 35:
+ # C.g:1:146: T59
+ self.mT59()
+
+
+
+ elif alt35 == 36:
+ # C.g:1:150: T60
+ self.mT60()
+
+
+
+ elif alt35 == 37:
+ # C.g:1:154: T61
+ self.mT61()
+
+
+
+ elif alt35 == 38:
+ # C.g:1:158: T62
+ self.mT62()
+
+
+
+ elif alt35 == 39:
+ # C.g:1:162: T63
+ self.mT63()
+
+
+
+ elif alt35 == 40:
+ # C.g:1:166: T64
+ self.mT64()
+
+
+
+ elif alt35 == 41:
+ # C.g:1:170: T65
+ self.mT65()
+
+
+
+ elif alt35 == 42:
+ # C.g:1:174: T66
+ self.mT66()
+
+
+
+ elif alt35 == 43:
+ # C.g:1:178: T67
+ self.mT67()
+
+
+
+ elif alt35 == 44:
+ # C.g:1:182: T68
+ self.mT68()
+
+
+
+ elif alt35 == 45:
+ # C.g:1:186: T69
+ self.mT69()
+
+
+
+ elif alt35 == 46:
+ # C.g:1:190: T70
+ self.mT70()
+
+
+
+ elif alt35 == 47:
+ # C.g:1:194: T71
+ self.mT71()
+
+
+
+ elif alt35 == 48:
+ # C.g:1:198: T72
+ self.mT72()
+
+
+
+ elif alt35 == 49:
+ # C.g:1:202: T73
+ self.mT73()
+
+
+
+ elif alt35 == 50:
+ # C.g:1:206: T74
+ self.mT74()
+
+
+
+ elif alt35 == 51:
+ # C.g:1:210: T75
+ self.mT75()
+
+
+
+ elif alt35 == 52:
+ # C.g:1:214: T76
+ self.mT76()
+
+
+
+ elif alt35 == 53:
+ # C.g:1:218: T77
+ self.mT77()
+
+
+
+ elif alt35 == 54:
+ # C.g:1:222: T78
+ self.mT78()
+
+
+
+ elif alt35 == 55:
+ # C.g:1:226: T79
+ self.mT79()
+
+
+
+ elif alt35 == 56:
+ # C.g:1:230: T80
+ self.mT80()
+
+
+
+ elif alt35 == 57:
+ # C.g:1:234: T81
+ self.mT81()
+
+
+
+ elif alt35 == 58:
+ # C.g:1:238: T82
+ self.mT82()
+
+
+
+ elif alt35 == 59:
+ # C.g:1:242: T83
+ self.mT83()
+
+
+
+ elif alt35 == 60:
+ # C.g:1:246: T84
+ self.mT84()
+
+
+
+ elif alt35 == 61:
+ # C.g:1:250: T85
+ self.mT85()
+
+
+
+ elif alt35 == 62:
+ # C.g:1:254: T86
+ self.mT86()
+
+
+
+ elif alt35 == 63:
+ # C.g:1:258: T87
+ self.mT87()
+
+
+
+ elif alt35 == 64:
+ # C.g:1:262: T88
+ self.mT88()
+
+
+
+ elif alt35 == 65:
+ # C.g:1:266: T89
+ self.mT89()
+
+
+
+ elif alt35 == 66:
+ # C.g:1:270: T90
+ self.mT90()
+
+
+
+ elif alt35 == 67:
+ # C.g:1:274: T91
+ self.mT91()
+
+
+
+ elif alt35 == 68:
+ # C.g:1:278: T92
+ self.mT92()
+
+
+
+ elif alt35 == 69:
+ # C.g:1:282: T93
+ self.mT93()
+
+
+
+ elif alt35 == 70:
+ # C.g:1:286: T94
+ self.mT94()
+
+
+
+ elif alt35 == 71:
+ # C.g:1:290: T95
+ self.mT95()
+
+
+
+ elif alt35 == 72:
+ # C.g:1:294: T96
+ self.mT96()
+
+
+
+ elif alt35 == 73:
+ # C.g:1:298: T97
+ self.mT97()
+
+
+
+ elif alt35 == 74:
+ # C.g:1:302: T98
+ self.mT98()
+
+
+
+ elif alt35 == 75:
+ # C.g:1:306: T99
+ self.mT99()
+
+
+
+ elif alt35 == 76:
+ # C.g:1:310: T100
+ self.mT100()
+
+
+
+ elif alt35 == 77:
+ # C.g:1:315: T101
+ self.mT101()
+
+
+
+ elif alt35 == 78:
+ # C.g:1:320: T102
+ self.mT102()
+
+
+
+ elif alt35 == 79:
+ # C.g:1:325: T103
+ self.mT103()
+
+
+
+ elif alt35 == 80:
+ # C.g:1:330: T104
+ self.mT104()
+
+
+
+ elif alt35 == 81:
+ # C.g:1:335: T105
+ self.mT105()
+
+
+
+ elif alt35 == 82:
+ # C.g:1:340: T106
+ self.mT106()
+
+
+
+ elif alt35 == 83:
+ # C.g:1:345: T107
+ self.mT107()
+
+
+
+ elif alt35 == 84:
+ # C.g:1:350: T108
+ self.mT108()
+
+
+
+ elif alt35 == 85:
+ # C.g:1:355: T109
+ self.mT109()
+
+
+
+ elif alt35 == 86:
+ # C.g:1:360: T110
+ self.mT110()
+
+
+
+ elif alt35 == 87:
+ # C.g:1:365: T111
+ self.mT111()
+
+
+
+ elif alt35 == 88:
+ # C.g:1:370: T112
+ self.mT112()
+
+
+
+ elif alt35 == 89:
+ # C.g:1:375: T113
+ self.mT113()
+
+
+
+ elif alt35 == 90:
+ # C.g:1:380: T114
+ self.mT114()
+
+
+
+ elif alt35 == 91:
+ # C.g:1:385: T115
+ self.mT115()
+
+
+
+ elif alt35 == 92:
+ # C.g:1:390: T116
+ self.mT116()
+
+
+
+ elif alt35 == 93:
+ # C.g:1:395: T117
+ self.mT117()
+
+
+
+ elif alt35 == 94:
+ # C.g:1:400: IDENTIFIER
+ self.mIDENTIFIER()
+
+
+
+ elif alt35 == 95:
+ # C.g:1:411: CHARACTER_LITERAL
+ self.mCHARACTER_LITERAL()
+
+
+
+ elif alt35 == 96:
+ # C.g:1:429: STRING_LITERAL
+ self.mSTRING_LITERAL()
+
+
+
+ elif alt35 == 97:
+ # C.g:1:444: HEX_LITERAL
+ self.mHEX_LITERAL()
+
+
+
+ elif alt35 == 98:
+ # C.g:1:456: DECIMAL_LITERAL
+ self.mDECIMAL_LITERAL()
+
+
+
+ elif alt35 == 99:
+ # C.g:1:472: OCTAL_LITERAL
+ self.mOCTAL_LITERAL()
+
+
+
+ elif alt35 == 100:
+ # C.g:1:486: FLOATING_POINT_LITERAL
+ self.mFLOATING_POINT_LITERAL()
+
+
+
+ elif alt35 == 101:
+ # C.g:1:509: WS
+ self.mWS()
+
+
+
+ elif alt35 == 102:
+ # C.g:1:512: BS
+ self.mBS()
+
+
+
+ elif alt35 == 103:
+ # C.g:1:515: UnicodeVocabulary
+ self.mUnicodeVocabulary()
+
+
+
+ elif alt35 == 104:
+ # C.g:1:533: COMMENT
+ self.mCOMMENT()
+
+
+
+ elif alt35 == 105:
+ # C.g:1:541: LINE_COMMENT
+ self.mLINE_COMMENT()
+
+
+
+ elif alt35 == 106:
+ # C.g:1:554: LINE_COMMAND
+ self.mLINE_COMMAND()
+
+
+
+
+
+
+
+
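+    # mTokens() is the entry point invoked by Lexer.nextToken(): dfa35
+    # predicts which of the 106 alternatives begins at the current input
+    # position and dispatches to the matching rule method. A minimal usage
+    # sketch, assuming the antlr3 Python runtime:
+    #
+    #   from antlr3 import ANTLRStringStream, CommonTokenStream
+    #   lexer = CLexer(ANTLRStringStream(u'int x = 0x1F;'))
+    #   for tok in CommonTokenStream(lexer).getTokens():
+    #       print(tok.type, tok.text)
+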
+ # lookup tables for DFA #25
+
+ DFA25_eot = DFA.unpack(
+ u"\7\uffff\1\10\2\uffff"
+ )
+
+ DFA25_eof = DFA.unpack(
+ u"\12\uffff"
+ )
+
+ DFA25_min = DFA.unpack(
+ u"\2\56\2\uffff\1\53\1\uffff\2\60\2\uffff"
+ )
+
+ DFA25_max = DFA.unpack(
+ u"\1\71\1\146\2\uffff\1\71\1\uffff\1\71\1\146\2\uffff"
+ )
+
+ DFA25_accept = DFA.unpack(
+ u"\2\uffff\1\2\1\1\1\uffff\1\4\2\uffff\2\3"
+ )
+
+ DFA25_special = DFA.unpack(
+ u"\12\uffff"
+ )
+
+
+ DFA25_transition = [
+ DFA.unpack(u"\1\2\1\uffff\12\1"),
+ DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
+ u"\1\4\1\5"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\6\1\uffff\1\6\2\uffff\12\7"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\12\7"),
+ DFA.unpack(u"\12\7\12\uffff\1\11\1\uffff\1\11\35\uffff\1\11\1\uffff"
+ u"\1\11"),
+ DFA.unpack(u""),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #25
+
+ DFA25 = DFA
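+
+    # The DFA*_eot/_eof/_min/_max/_accept/_special strings above and below
+    # use ANTLR's run-length packed encoding: characters come in
+    # (count, value) pairs, so u"\12\uffff" expands to ten 0xFFFF entries
+    # (the escapes are octal), and DFA.unpack() inflates them into the
+    # integer tables consumed by predict().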
+ # lookup tables for DFA #35
+
+ DFA35_eot = DFA.unpack(
+ u"\2\uffff\1\76\1\uffff\1\101\14\76\3\uffff\10\76\4\uffff\1\151\1"
+ u"\153\1\157\1\163\1\167\1\171\1\174\1\uffff\1\177\1\u0082\1\u0085"
+ u"\1\u0087\1\u008a\1\uffff\5\76\1\uffff\2\73\2\u0095\2\uffff\1\73"
+ u"\2\uffff\1\76\4\uffff\16\76\1\u00ad\5\76\1\u00b4\1\76\3\uffff\1"
+ u"\u00b7\10\76\34\uffff\1\u00c1\2\uffff\1\u00c3\10\uffff\5\76\3\uffff"
+ u"\1\u00c9\1\uffff\1\u0095\3\uffff\23\76\1\uffff\1\u00de\1\76\1\u00e0"
+ u"\3\76\1\uffff\2\76\1\uffff\1\76\1\u00e7\6\76\4\uffff\5\76\1\uffff"
+ u"\1\76\1\u00f5\1\76\1\u00f7\6\76\1\u00fe\4\76\1\u0103\1\u0104\2"
+ u"\76\1\u0107\1\uffff\1\u0108\1\uffff\6\76\1\uffff\10\76\1\u0118"
+ u"\1\76\1\u011a\2\76\1\uffff\1\76\1\uffff\5\76\1\u0123\1\uffff\4"
+ u"\76\2\uffff\1\76\1\u0129\2\uffff\1\u012a\3\76\1\u012e\1\76\1\u0130"
+ u"\7\76\1\u0139\1\uffff\1\u013a\1\uffff\1\u013b\1\76\1\u013d\1\u013e"
+ u"\1\u013f\1\u0140\1\u0141\1\u0142\1\uffff\1\76\1\u0144\1\u0145\2"
+ u"\76\2\uffff\1\76\1\u0149\1\76\1\uffff\1\76\1\uffff\5\76\1\u0151"
+ u"\1\u0152\1\76\3\uffff\1\u0154\6\uffff\1\76\2\uffff\2\76\1\u0158"
+ u"\1\uffff\7\76\2\uffff\1\u0160\1\uffff\1\u0161\1\u0162\1\u0163\1"
+ u"\uffff\1\u0164\1\u0165\1\76\1\u0167\3\76\6\uffff\1\u016b\1\uffff"
+ u"\3\76\1\uffff\21\76\1\u0180\2\76\1\uffff\3\76\1\u0186\1\76\1\uffff"
+ u"\11\76\1\u0191\1\uffff"
+ )
+
+ DFA35_eof = DFA.unpack(
+ u"\u0192\uffff"
+ )
+
+ DFA35_min = DFA.unpack(
+ u"\1\3\1\uffff\1\171\1\uffff\1\75\1\154\1\150\1\165\1\145\1\124\1"
+ u"\157\1\141\1\146\1\157\1\154\1\145\1\156\3\uffff\1\116\1\120\1"
+ u"\117\1\116\1\117\1\114\1\106\1\101\4\uffff\1\75\1\56\1\53\1\55"
+ u"\1\52\1\75\1\46\1\uffff\1\75\1\74\3\75\1\uffff\1\137\1\150\1\157"
+ u"\1\162\1\42\1\uffff\2\0\2\56\2\uffff\1\0\2\uffff\1\160\4\uffff"
+ u"\1\163\1\164\1\165\1\151\1\141\1\147\1\157\1\164\1\147\1\101\1"
+ u"\151\1\163\1\156\1\141\1\44\1\164\1\156\1\162\1\157\1\146\1\44"
+ u"\1\151\3\uffff\1\44\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34"
+ u"\uffff\1\75\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145"
+ u"\3\uffff\1\56\1\uffff\1\56\3\uffff\3\145\1\155\2\164\1\165\1\145"
+ u"\1\156\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\163\1"
+ u"\162\1\uffff\1\44\1\147\1\44\2\141\1\142\1\uffff\1\151\1\157\1"
+ u"\uffff\1\111\1\44\1\123\1\114\1\101\1\102\1\101\1\113\4\uffff\1"
+ u"\163\1\155\1\154\1\157\1\141\1\uffff\1\144\1\44\1\162\1\44\1\143"
+ u"\1\151\1\143\1\157\1\145\1\164\1\44\1\163\1\162\1\111\1\164\2\44"
+ u"\1\151\1\164\1\44\1\uffff\1\44\1\uffff\1\164\1\165\1\154\1\147"
+ u"\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\102\1\120\1\105"
+ u"\1\155\1\44\1\145\1\44\1\153\1\145\1\uffff\1\156\1\uffff\1\150"
+ u"\1\143\1\164\1\146\1\144\1\44\1\uffff\1\164\1\156\1\103\1\151\2"
+ u"\uffff\1\156\1\44\2\uffff\1\44\1\154\1\145\1\156\1\44\1\116\1\44"
+ u"\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\44\1\uffff\1\44\1"
+ u"\uffff\1\44\1\146\6\44\1\uffff\1\145\2\44\1\154\1\165\2\uffff\1"
+ u"\164\1\44\1\145\1\uffff\1\101\1\uffff\1\116\1\114\1\137\1\116\1"
+ u"\117\2\44\1\137\3\uffff\1\44\6\uffff\1\162\2\uffff\2\145\1\44\1"
+ u"\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff\1\44\1\uffff\3\44"
+ u"\1\uffff\2\44\1\104\1\44\1\105\1\111\1\123\6\uffff\1\44\1\uffff"
+ u"\2\115\1\105\1\uffff\1\117\1\105\1\122\1\126\1\123\1\126\2\105"
+ u"\1\111\1\137\1\122\1\103\1\111\1\126\1\105\1\106\1\111\1\44\1\137"
+ u"\1\103\1\uffff\1\125\1\105\1\116\1\44\1\122\1\uffff\1\105\1\106"
+ u"\1\105\1\122\1\105\1\116\1\103\1\105\1\104\1\44\1\uffff"
+ )
+
+ DFA35_max = DFA.unpack(
+ u"\1\ufffe\1\uffff\1\171\1\uffff\1\75\1\170\1\167\1\165\1\145\1\124"
+ u"\2\157\1\156\3\157\1\156\3\uffff\1\116\1\125\1\117\1\116\1\117"
+ u"\1\114\1\106\1\101\4\uffff\1\75\1\71\1\75\1\76\3\75\1\uffff\2\75"
+ u"\1\76\1\75\1\174\1\uffff\1\141\1\150\1\157\1\162\1\47\1\uffff\2"
+ u"\ufffe\1\170\1\146\2\uffff\1\ufffe\2\uffff\1\160\4\uffff\1\163"
+ u"\1\164\1\165\1\151\1\162\1\172\1\157\2\164\1\101\1\154\1\163\1"
+ u"\156\1\141\1\172\1\164\1\156\1\162\1\157\1\146\1\172\1\163\3\uffff"
+ u"\1\172\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34\uffff\1\75"
+ u"\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145\3\uffff\1"
+ u"\146\1\uffff\1\146\3\uffff\3\145\1\155\2\164\1\165\1\145\1\156"
+ u"\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\164\1\162\1"
+ u"\uffff\1\172\1\147\1\172\2\141\1\142\1\uffff\1\151\1\157\1\uffff"
+ u"\1\111\1\172\1\123\1\114\1\101\1\102\1\137\1\113\4\uffff\1\163"
+ u"\1\155\1\154\1\157\1\141\1\uffff\1\144\1\172\1\162\1\172\1\143"
+ u"\1\151\1\143\1\157\1\145\1\164\1\172\1\163\1\162\1\111\1\164\2"
+ u"\172\1\151\1\164\1\172\1\uffff\1\172\1\uffff\1\164\1\165\1\154"
+ u"\1\147\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\122\1\120"
+ u"\1\105\1\155\1\172\1\145\1\172\1\153\1\145\1\uffff\1\156\1\uffff"
+ u"\1\150\1\143\1\164\1\146\1\144\1\172\1\uffff\1\164\1\156\1\103"
+ u"\1\151\2\uffff\1\156\1\172\2\uffff\1\172\1\154\1\145\1\156\1\172"
+ u"\1\116\1\172\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\172\1"
+ u"\uffff\1\172\1\uffff\1\172\1\146\6\172\1\uffff\1\145\2\172\1\154"
+ u"\1\165\2\uffff\1\164\1\172\1\145\1\uffff\1\101\1\uffff\1\116\1"
+ u"\114\1\137\1\116\1\117\2\172\1\137\3\uffff\1\172\6\uffff\1\162"
+ u"\2\uffff\2\145\1\172\1\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff"
+ u"\1\172\1\uffff\3\172\1\uffff\2\172\1\104\1\172\1\105\1\111\1\123"
+ u"\6\uffff\1\172\1\uffff\2\115\1\105\1\uffff\1\117\1\105\1\122\1"
+ u"\126\1\123\1\126\2\105\1\111\1\137\1\122\1\103\1\111\1\126\1\105"
+ u"\1\106\1\111\1\172\1\137\1\103\1\uffff\1\125\1\105\1\116\1\172"
+ u"\1\122\1\uffff\1\105\1\106\1\105\1\122\1\105\1\116\1\103\1\105"
+ u"\1\104\1\172\1\uffff"
+ )
+
+ DFA35_accept = DFA.unpack(
+ u"\1\uffff\1\1\1\uffff\1\3\15\uffff\1\23\1\24\1\27\10\uffff\1\46"
+ u"\1\47\1\50\1\51\7\uffff\1\66\5\uffff\1\102\5\uffff\1\136\4\uffff"
+ u"\1\145\1\146\1\uffff\1\147\1\1\1\uffff\1\136\1\3\1\107\1\4\26\uffff"
+ u"\1\23\1\24\1\27\11\uffff\1\46\1\47\1\50\1\51\1\70\1\52\1\53\1\63"
+ u"\1\144\1\73\1\60\1\54\1\74\1\64\1\61\1\55\1\150\1\151\1\71\1\56"
+ u"\1\72\1\57\1\77\1\104\1\65\1\66\1\110\1\67\1\uffff\1\113\1\111"
+ u"\1\uffff\1\114\1\112\1\100\1\106\1\103\1\101\1\105\1\102\5\uffff"
+ u"\1\140\1\137\1\141\1\uffff\1\142\1\uffff\1\145\1\146\1\152\23\uffff"
+ u"\1\124\6\uffff\1\130\2\uffff\1\33\10\uffff\1\75\1\115\1\76\1\116"
+ u"\5\uffff\1\143\24\uffff\1\15\1\uffff\1\131\6\uffff\1\34\15\uffff"
+ u"\1\125\1\uffff\1\30\6\uffff\1\7\4\uffff\1\12\1\122\2\uffff\1\13"
+ u"\1\16\17\uffff\1\120\1\uffff\1\132\10\uffff\1\14\5\uffff\1\31\1"
+ u"\17\3\uffff\1\26\1\uffff\1\36\10\uffff\1\121\1\127\1\134\1\uffff"
+ u"\1\5\1\126\1\6\1\25\1\62\1\21\1\uffff\1\135\1\11\3\uffff\1\20\7"
+ u"\uffff\1\42\1\45\1\uffff\1\2\3\uffff\1\123\7\uffff\1\117\1\10\1"
+ u"\32\1\133\1\22\1\35\1\uffff\1\40\3\uffff\1\37\24\uffff\1\43\5\uffff"
+ u"\1\44\12\uffff\1\41"
+ )
+
+ DFA35_special = DFA.unpack(
+ u"\u0192\uffff"
+ )
+
+
+ DFA35_transition = [
+ DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
+ u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
+ u"\67\1\23\1\1\1\51\1\4\1\52\1\55\1\73\2\63\1\26\1\63\1\32\1\63\1"
+ u"\31\1\63\1\24\2\63\1\62\2\63\1\25\1\33\2\63\1\11\1\63\1\27\1\30"
+ u"\4\63\1\36\1\71\1\37\1\53\1\56\1\73\1\7\1\61\1\13\1\17\1\5\1\16"
+ u"\1\60\1\63\1\14\2\63\1\15\5\63\1\10\1\6\1\2\1\20\1\12\1\57\3\63"
+ u"\1\21\1\54\1\22\1\47\uff80\73"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\75"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\100"),
+ DFA.unpack(u"\1\102\1\uffff\1\104\11\uffff\1\103"),
+ DFA.unpack(u"\1\110\1\107\12\uffff\1\106\2\uffff\1\105"),
+ DFA.unpack(u"\1\111"),
+ DFA.unpack(u"\1\112"),
+ DFA.unpack(u"\1\113"),
+ DFA.unpack(u"\1\114"),
+ DFA.unpack(u"\1\115\6\uffff\1\117\6\uffff\1\116"),
+ DFA.unpack(u"\1\120\7\uffff\1\121"),
+ DFA.unpack(u"\1\122"),
+ DFA.unpack(u"\1\124\2\uffff\1\123"),
+ DFA.unpack(u"\1\125\11\uffff\1\126"),
+ DFA.unpack(u"\1\127"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\133"),
+ DFA.unpack(u"\1\134\4\uffff\1\135"),
+ DFA.unpack(u"\1\136"),
+ DFA.unpack(u"\1\137"),
+ DFA.unpack(u"\1\140"),
+ DFA.unpack(u"\1\141"),
+ DFA.unpack(u"\1\142"),
+ DFA.unpack(u"\1\143"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\150"),
+ DFA.unpack(u"\1\152\1\uffff\12\154"),
+ DFA.unpack(u"\1\156\21\uffff\1\155"),
+ DFA.unpack(u"\1\162\17\uffff\1\160\1\161"),
+ DFA.unpack(u"\1\164\4\uffff\1\165\15\uffff\1\166"),
+ DFA.unpack(u"\1\170"),
+ DFA.unpack(u"\1\173\26\uffff\1\172"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\176"),
+ DFA.unpack(u"\1\u0080\1\u0081"),
+ DFA.unpack(u"\1\u0084\1\u0083"),
+ DFA.unpack(u"\1\u0086"),
+ DFA.unpack(u"\1\u0089\76\uffff\1\u0088"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u008c\1\uffff\1\u008d"),
+ DFA.unpack(u"\1\u008e"),
+ DFA.unpack(u"\1\u008f"),
+ DFA.unpack(u"\1\u0090"),
+ DFA.unpack(u"\1\u0091\4\uffff\1\u0092"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\47\u0092\1\uffff\uffd7\u0092"),
+ DFA.unpack(u"\uffff\u0091"),
+ DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\21\uffff"
+ u"\1\u0093\13\uffff\3\154\21\uffff\1\u0093"),
+ DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\uffff\u0099"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u009a"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u009b"),
+ DFA.unpack(u"\1\u009c"),
+ DFA.unpack(u"\1\u009d"),
+ DFA.unpack(u"\1\u009e"),
+ DFA.unpack(u"\1\u009f\20\uffff\1\u00a0"),
+ DFA.unpack(u"\1\u00a2\22\uffff\1\u00a1"),
+ DFA.unpack(u"\1\u00a3"),
+ DFA.unpack(u"\1\u00a4"),
+ DFA.unpack(u"\1\u00a5\14\uffff\1\u00a6"),
+ DFA.unpack(u"\1\u00a7"),
+ DFA.unpack(u"\1\u00a9\2\uffff\1\u00a8"),
+ DFA.unpack(u"\1\u00aa"),
+ DFA.unpack(u"\1\u00ab"),
+ DFA.unpack(u"\1\u00ac"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00ae"),
+ DFA.unpack(u"\1\u00af"),
+ DFA.unpack(u"\1\u00b0"),
+ DFA.unpack(u"\1\u00b1"),
+ DFA.unpack(u"\1\u00b2"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\24\76\1\u00b3\5\76"),
+ DFA.unpack(u"\1\u00b6\11\uffff\1\u00b5"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00b8"),
+ DFA.unpack(u"\1\u00b9"),
+ DFA.unpack(u"\1\u00ba"),
+ DFA.unpack(u"\1\u00bb"),
+ DFA.unpack(u"\1\u00bc"),
+ DFA.unpack(u"\1\u00bd"),
+ DFA.unpack(u"\1\u00be"),
+ DFA.unpack(u"\1\u00bf"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00c0"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00c2"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00c4"),
+ DFA.unpack(u"\1\u00c5"),
+ DFA.unpack(u"\1\u00c6"),
+ DFA.unpack(u"\1\u00c7"),
+ DFA.unpack(u"\1\u00c8"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\35\uffff"
+ u"\3\154"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00ca"),
+ DFA.unpack(u"\1\u00cb"),
+ DFA.unpack(u"\1\u00cc"),
+ DFA.unpack(u"\1\u00cd"),
+ DFA.unpack(u"\1\u00ce"),
+ DFA.unpack(u"\1\u00cf"),
+ DFA.unpack(u"\1\u00d0"),
+ DFA.unpack(u"\1\u00d1"),
+ DFA.unpack(u"\1\u00d2"),
+ DFA.unpack(u"\1\u00d3"),
+ DFA.unpack(u"\1\u00d4"),
+ DFA.unpack(u"\1\u00d5"),
+ DFA.unpack(u"\1\u00d6"),
+ DFA.unpack(u"\1\u00d7"),
+ DFA.unpack(u"\1\u00d8"),
+ DFA.unpack(u"\1\u00d9"),
+ DFA.unpack(u"\1\u00da"),
+ DFA.unpack(u"\1\u00dc\1\u00db"),
+ DFA.unpack(u"\1\u00dd"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00df"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00e1"),
+ DFA.unpack(u"\1\u00e2"),
+ DFA.unpack(u"\1\u00e3"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00e4"),
+ DFA.unpack(u"\1\u00e5"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00e6"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00e8"),
+ DFA.unpack(u"\1\u00e9"),
+ DFA.unpack(u"\1\u00ea"),
+ DFA.unpack(u"\1\u00eb"),
+ DFA.unpack(u"\1\u00ed\35\uffff\1\u00ec"),
+ DFA.unpack(u"\1\u00ee"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00ef"),
+ DFA.unpack(u"\1\u00f0"),
+ DFA.unpack(u"\1\u00f1"),
+ DFA.unpack(u"\1\u00f2"),
+ DFA.unpack(u"\1\u00f3"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00f4"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00f6"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00f8"),
+ DFA.unpack(u"\1\u00f9"),
+ DFA.unpack(u"\1\u00fa"),
+ DFA.unpack(u"\1\u00fb"),
+ DFA.unpack(u"\1\u00fc"),
+ DFA.unpack(u"\1\u00fd"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00ff"),
+ DFA.unpack(u"\1\u0100"),
+ DFA.unpack(u"\1\u0101"),
+ DFA.unpack(u"\1\u0102"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0105"),
+ DFA.unpack(u"\1\u0106"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0109"),
+ DFA.unpack(u"\1\u010a"),
+ DFA.unpack(u"\1\u010b"),
+ DFA.unpack(u"\1\u010c"),
+ DFA.unpack(u"\1\u010d"),
+ DFA.unpack(u"\1\u010e"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u010f"),
+ DFA.unpack(u"\1\u0110"),
+ DFA.unpack(u"\1\u0111"),
+ DFA.unpack(u"\1\u0112"),
+ DFA.unpack(u"\1\u0114\17\uffff\1\u0113"),
+ DFA.unpack(u"\1\u0115"),
+ DFA.unpack(u"\1\u0116"),
+ DFA.unpack(u"\1\u0117"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0119"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u011b"),
+ DFA.unpack(u"\1\u011c"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u011d"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u011e"),
+ DFA.unpack(u"\1\u011f"),
+ DFA.unpack(u"\1\u0120"),
+ DFA.unpack(u"\1\u0121"),
+ DFA.unpack(u"\1\u0122"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0124"),
+ DFA.unpack(u"\1\u0125"),
+ DFA.unpack(u"\1\u0126"),
+ DFA.unpack(u"\1\u0127"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0128"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u012b"),
+ DFA.unpack(u"\1\u012c"),
+ DFA.unpack(u"\1\u012d"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u012f"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0131"),
+ DFA.unpack(u"\1\u0132"),
+ DFA.unpack(u"\1\u0133"),
+ DFA.unpack(u"\1\u0134"),
+ DFA.unpack(u"\1\u0135"),
+ DFA.unpack(u"\1\u0136"),
+ DFA.unpack(u"\1\u0137"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\u0138\1"
+ u"\uffff\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u013c"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0143"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0146"),
+ DFA.unpack(u"\1\u0147"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0148"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u014a"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u014b"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u014c"),
+ DFA.unpack(u"\1\u014d"),
+ DFA.unpack(u"\1\u014e"),
+ DFA.unpack(u"\1\u014f"),
+ DFA.unpack(u"\1\u0150"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0153"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0155"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0156"),
+ DFA.unpack(u"\1\u0157"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0159"),
+ DFA.unpack(u"\1\u015a"),
+ DFA.unpack(u"\1\u015b"),
+ DFA.unpack(u"\1\u015c"),
+ DFA.unpack(u"\1\u015d"),
+ DFA.unpack(u"\1\u015e"),
+ DFA.unpack(u"\1\u015f"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0166"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0168"),
+ DFA.unpack(u"\1\u0169"),
+ DFA.unpack(u"\1\u016a"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u016c"),
+ DFA.unpack(u"\1\u016d"),
+ DFA.unpack(u"\1\u016e"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u016f"),
+ DFA.unpack(u"\1\u0170"),
+ DFA.unpack(u"\1\u0171"),
+ DFA.unpack(u"\1\u0172"),
+ DFA.unpack(u"\1\u0173"),
+ DFA.unpack(u"\1\u0174"),
+ DFA.unpack(u"\1\u0175"),
+ DFA.unpack(u"\1\u0176"),
+ DFA.unpack(u"\1\u0177"),
+ DFA.unpack(u"\1\u0178"),
+ DFA.unpack(u"\1\u0179"),
+ DFA.unpack(u"\1\u017a"),
+ DFA.unpack(u"\1\u017b"),
+ DFA.unpack(u"\1\u017c"),
+ DFA.unpack(u"\1\u017d"),
+ DFA.unpack(u"\1\u017e"),
+ DFA.unpack(u"\1\u017f"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0181"),
+ DFA.unpack(u"\1\u0182"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0183"),
+ DFA.unpack(u"\1\u0184"),
+ DFA.unpack(u"\1\u0185"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0187"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0188"),
+ DFA.unpack(u"\1\u0189"),
+ DFA.unpack(u"\1\u018a"),
+ DFA.unpack(u"\1\u018b"),
+ DFA.unpack(u"\1\u018c"),
+ DFA.unpack(u"\1\u018d"),
+ DFA.unpack(u"\1\u018e"),
+ DFA.unpack(u"\1\u018f"),
+ DFA.unpack(u"\1\u0190"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #35: DFA35_special unpacks to all -1, so no
+ # special-state handling is needed and the stock runtime DFA class is
+ # bound directly
+
+ DFA35 = DFA
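+ # The packed strings above are run-length encoded. Assuming the standard
+ # antlr3 Python runtime, DFA.unpack reads each string as (count, value)
+ # character pairs and expands every pair into `count` copies of `value`,
+ # with u"\uffff" standing in for -1 (no transition / not an accept state).
+ # A minimal decoding sketch:
+ #
+ # def unpack_sketch(packed):
+ # # u"\1\172\2\uffff" -> [0x7A, -1, -1]
+ # out = []
+ # for i in range(0, len(packed), 2):
+ # count, value = ord(packed[i]), ord(packed[i + 1])
+ # out.extend([-1 if value == 0xFFFF else value] * count)
+ # return out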
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CParser.py
new file mode 100755
index 00000000..cd460127
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/CParser.py
@@ -0,0 +1,18833 @@
+# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
+
+from __future__ import print_function
+from __future__ import absolute_import
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+from Ecc import CodeFragment
+from Ecc import FileProfile
+
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+BS=20
+LINE_COMMENT=23
+FloatTypeSuffix=16
+IntegerTypeSuffix=14
+LETTER=11
+OCTAL_LITERAL=6
+CHARACTER_LITERAL=8
+Exponent=15
+EOF=-1
+HexDigit=13
+STRING_LITERAL=9
+WS=19
+FLOATING_POINT_LITERAL=10
+IDENTIFIER=4
+UnicodeEscape=18
+LINE_COMMAND=24
+UnicodeVocabulary=21
+HEX_LITERAL=5
+COMMENT=22
+DECIMAL_LITERAL=7
+EscapeSequence=12
+OctalEscape=17
+
+# token names
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
+ "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
+ "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
+ "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
+ "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
+ "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
+ "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
+ "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
+ "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
+ "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
+ "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
+ "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
+ "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
+ "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
+ "'goto'", "'continue'", "'break'", "'return'"
+]
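+# The integer token-type constants above index into tokenNames, so a raw
+# token type can be mapped back to its display name in diagnostics. Two
+# spot checks consistent with the table:
+#
+# assert tokenNames[IDENTIFIER] == "IDENTIFIER" # IDENTIFIER == 4
+# assert tokenNames[25] == "';'" # literal tokens start at 25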
+
+
+class function_definition_scope(object):
+ def __init__(self):
+ self.ModifierText = None
+ self.DeclText = None
+ self.LBLine = None
+ self.LBOffset = None
+ self.DeclLine = None
+ self.DeclOffset = None
+class postfix_expression_scope(object):
+ def __init__(self):
+ self.FuncCallText = None
+
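+# The two classes above back ANTLR dynamic rule scopes: entering the
+# corresponding rule pushes a fresh instance, embedded actions read and
+# write the innermost one through stack[-1], and the rule pops it again on
+# exit (see function_definition below). A minimal sketch of that
+# discipline, using a hypothetical value:
+#
+# stack = []
+# stack.append(function_definition_scope()) # rule entry
+# stack[-1].DeclText = 'VOID Foo (VOID)' # action inside the rule
+# stack.pop() # rule exit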
+
+class CParser(Parser):
+ grammarFileName = "C.g"
+ tokenNames = tokenNames
+
+ def __init__(self, input):
+ Parser.__init__(self, input)
+ self.ruleMemo = {}
+
+ self.function_definition_stack = []
+ self.postfix_expression_stack = []
+
+ def printTokenInfo(self, line, offset, tokenText):
+ print(str(line) + ',' + str(offset) + ':' + str(tokenText))
+
+ def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+ def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+ def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+ def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+ def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+ def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+ def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
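+ # Each Store* helper above wraps its arguments in an Ecc.CodeFragment
+ # record and appends it to the matching module-level list in
+ # Ecc.FileProfile; the ECC checkers walk those lists once parsing is
+ # done. A minimal consumption sketch (Report is a hypothetical hook,
+ # not something defined in this file):
+ #
+ # for Fragment in FileProfile.FunctionDefinitionList:
+ # Report(Fragment)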
+
+
+
+
+ # $ANTLR start translation_unit
+ # C.g:102:1: translation_unit : ( external_declaration )* ;
+ def translation_unit(self, ):
+
+ translation_unit_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
+ return
+
+ # C.g:103:2: ( ( external_declaration )* )
+ # C.g:103:4: ( external_declaration )*
+ # C.g:103:4: ( external_declaration )*
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == IDENTIFIER or LA1_0 == 26 or (29 <= LA1_0 <= 42) or (45 <= LA1_0 <= 46) or (48 <= LA1_0 <= 62) or LA1_0 == 66) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # C.g:0:0: external_declaration
+ self.following.append(self.FOLLOW_external_declaration_in_translation_unit74)
+ self.external_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 1, translation_unit_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end translation_unit
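+ # Every rule method here follows the same memoized-backtracking template
+ # seen in translation_unit: while self.backtracking > 0, the rule first
+ # asks alreadyParsedRule() whether this (rule, input position) pair was
+ # already attempted, and memoize() records the outcome on the way out.
+ # That bookkeeping is what keeps the synpredN() syntactic predicates
+ # below from reparsing the same span twice.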
+
+
+ # $ANTLR start external_declaration
+ # C.g:114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );
+ def external_declaration(self, ):
+
+ external_declaration_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2):
+ return
+
+ # C.g:119:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? )
+ alt3 = 3
+ LA3_0 = self.input.LA(1)
+
+ if ((29 <= LA3_0 <= 33)) :
+ LA3_1 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 34) :
+ LA3_2 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 35) :
+ LA3_3 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 36) :
+ LA3_4 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 4, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 37) :
+ LA3_5 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 38) :
+ LA3_6 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 39) :
+ LA3_7 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 40) :
+ LA3_8 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 8, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 41) :
+ LA3_9 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 42) :
+ LA3_10 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input)
+
+ raise nvae
+
+ elif ((45 <= LA3_0 <= 46)) :
+ LA3_11 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 48) :
+ LA3_12 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 12, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == IDENTIFIER) :
+ LA3_13 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ elif (True) :
+ alt3 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 58) :
+ LA3_14 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 66) and (self.synpred4()):
+ alt3 = 1
+ elif (LA3_0 == 59) :
+ LA3_16 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 60) :
+ LA3_17 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 17, self.input)
+
+ raise nvae
+
+ elif ((49 <= LA3_0 <= 57) or LA3_0 == 61) :
+ LA3_18 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 62) and (self.synpred4()):
+ alt3 = 1
+ elif (LA3_0 == 26) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input)
+
+ raise nvae
+
+ if alt3 == 1:
+ # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition
+ self.following.append(self.FOLLOW_function_definition_in_external_declaration113)
+ self.function_definition()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt3 == 2:
+ # C.g:120:4: declaration
+ self.following.append(self.FOLLOW_declaration_in_external_declaration118)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt3 == 3:
+ # C.g:121:4: macro_statement ( ';' )?
+ self.following.append(self.FOLLOW_macro_statement_in_external_declaration123)
+ self.macro_statement()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:121:20: ( ';' )?
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == 25) :
+ alt2 = 1
+ if alt2 == 1:
+ # C.g:121:21: ';'
+ self.match(self.input, 25, self.FOLLOW_25_in_external_declaration126)
+ if self.failed:
+ return
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 2, external_declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end external_declaration
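+ # The long elif chain above is the generated LL(*) decision for
+ # external_declaration: each branch pairs one or two tokens of lookahead
+ # (LA(1), LA(2)) with the syntactic predicates synpred4/synpred5 to pick
+ # between function_definition, declaration and macro_statement, raising
+ # NoViableAltException when no alternative fits.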
+
+ class function_definition_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start function_definition
+ # C.g:126:1: function_definition : (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) ;
+ def function_definition(self, ):
+ self.function_definition_stack.append(function_definition_scope())
+ retval = self.function_definition_return()
+ retval.start = self.input.LT(1)
+ function_definition_StartIndex = self.input.index()
+ d = None
+
+ a = None
+
+ b = None
+
+ declarator1 = None
+
+
+
+ self.function_definition_stack[-1].ModifierText = ''
+ self.function_definition_stack[-1].DeclText = ''
+ self.function_definition_stack[-1].LBLine = 0
+ self.function_definition_stack[-1].LBOffset = 0
+ self.function_definition_stack[-1].DeclLine = 0
+ self.function_definition_stack[-1].DeclOffset = 0
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 3):
+ return retval
+
+ # C.g:146:2: ( (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) )
+ # C.g:146:4: (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement )
+ # C.g:146:5: (d= declaration_specifiers )?
+ alt4 = 2
+ LA4 = self.input.LA(1)
+ if LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33 or LA4 == 34 or LA4 == 35 or LA4 == 36 or LA4 == 37 or LA4 == 38 or LA4 == 39 or LA4 == 40 or LA4 == 41 or LA4 == 42 or LA4 == 45 or LA4 == 46 or LA4 == 48 or LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
+ alt4 = 1
+ elif LA4 == IDENTIFIER:
+ LA4 = self.input.LA(2)
+ if LA4 == 66:
+ alt4 = 1
+ elif LA4 == 58:
+ LA4_21 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 59:
+ LA4_22 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 60:
+ LA4_23 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == IDENTIFIER:
+ LA4_24 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 62:
+ LA4_25 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33:
+ LA4_26 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 34:
+ LA4_27 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 35:
+ LA4_28 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 36:
+ LA4_29 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 37:
+ LA4_30 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 38:
+ LA4_31 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 39:
+ LA4_32 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 40:
+ LA4_33 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 41:
+ LA4_34 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 42:
+ LA4_35 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 45 or LA4 == 46:
+ LA4_36 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 48:
+ LA4_37 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
+ LA4_38 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 58:
+ LA4_14 = self.input.LA(2)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 59:
+ LA4_16 = self.input.LA(2)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 60:
+ LA4_17 = self.input.LA(2)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ if alt4 == 1:
+ # C.g:0:0: d= declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_function_definition157)
+ d = self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ self.following.append(self.FOLLOW_declarator_in_function_definition160)
+ declarator1 = self.declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if (LA6_0 == IDENTIFIER or LA6_0 == 26 or (29 <= LA6_0 <= 42) or (45 <= LA6_0 <= 46) or (48 <= LA6_0 <= 61)) :
+ alt6 = 1
+ elif (LA6_0 == 43) :
+ alt6 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )", 6, 0, self.input)
+
+ raise nvae
+
+ if alt6 == 1:
+ # C.g:147:5: ( declaration )+ a= compound_statement
+ # C.g:147:5: ( declaration )+
+ cnt5 = 0
+ while True: #loop5
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == IDENTIFIER or LA5_0 == 26 or (29 <= LA5_0 <= 42) or (45 <= LA5_0 <= 46) or (48 <= LA5_0 <= 61)) :
+ alt5 = 1
+
+
+ if alt5 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_function_definition166)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ if cnt5 >= 1:
+ break #loop5
+
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ eee = EarlyExitException(5, self.input)
+ raise eee
+
+ cnt5 += 1
+
+
+ self.following.append(self.FOLLOW_compound_statement_in_function_definition171)
+ a = self.compound_statement()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt6 == 2:
+ # C.g:148:5: b= compound_statement
+ self.following.append(self.FOLLOW_compound_statement_in_function_definition180)
+ b = self.compound_statement()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ if self.backtracking == 0:
+
+ if d is not None:
+ self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
+ else:
+ self.function_definition_stack[-1].ModifierText = ''
+ self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start, declarator1.stop)
+ self.function_definition_stack[-1].DeclLine = declarator1.start.line
+ self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
+ if a is not None:
+ self.function_definition_stack[-1].LBLine = a.start.line
+ self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
+ else:
+ self.function_definition_stack[-1].LBLine = b.start.line
+ self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+ if self.backtracking == 0:
+
+ self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 3, function_definition_StartIndex)
+
+ self.function_definition_stack.pop()
+ pass
+
+ return retval
+
+ # $ANTLR end function_definition
+
+
+ # $ANTLR start declaration
+ # C.g:166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );
+ def declaration(self, ):
+
+ declaration_StartIndex = self.input.index()
+ a = None
+ d = None
+ e = None
+ b = None
+
+ c = None
+
+ s = None
+
+ t = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4):
+ return
+
+ # C.g:167:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' )
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == 26) :
+ alt9 = 1
+ elif (LA9_0 == IDENTIFIER or (29 <= LA9_0 <= 42) or (45 <= LA9_0 <= 46) or (48 <= LA9_0 <= 61)) :
+ alt9 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input)
+
+ raise nvae
+
+ if alt9 == 1:
+ # C.g:167:4: a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';'
+ a = self.input.LT(1)
+ self.match(self.input, 26, self.FOLLOW_26_in_declaration203)
+ if self.failed:
+ return
+ # C.g:167:17: (b= declaration_specifiers )?
+ alt7 = 2
+ LA7 = self.input.LA(1)
+ if LA7 == 29 or LA7 == 30 or LA7 == 31 or LA7 == 32 or LA7 == 33 or LA7 == 34 or LA7 == 35 or LA7 == 36 or LA7 == 37 or LA7 == 38 or LA7 == 39 or LA7 == 40 or LA7 == 41 or LA7 == 42 or LA7 == 45 or LA7 == 46 or LA7 == 48 or LA7 == 49 or LA7 == 50 or LA7 == 51 or LA7 == 52 or LA7 == 53 or LA7 == 54 or LA7 == 55 or LA7 == 56 or LA7 == 57 or LA7 == 61:
+ alt7 = 1
+ elif LA7 == IDENTIFIER:
+ LA7_13 = self.input.LA(2)
+
+ if (LA7_13 == 62) :
+ LA7_21 = self.input.LA(3)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ elif (LA7_13 == IDENTIFIER or (29 <= LA7_13 <= 42) or (45 <= LA7_13 <= 46) or (48 <= LA7_13 <= 61) or LA7_13 == 66) :
+ alt7 = 1
+ elif LA7 == 58:
+ LA7_14 = self.input.LA(2)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ elif LA7 == 59:
+ LA7_16 = self.input.LA(2)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ elif LA7 == 60:
+ LA7_17 = self.input.LA(2)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ if alt7 == 1:
+ # C.g:0:0: b= declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_declaration207)
+ b = self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_init_declarator_list_in_declaration216)
+ c = self.init_declarator_list()
+ self.following.pop()
+ if self.failed:
+ return
+ d = self.input.LT(1)
+ self.match(self.input, 25, self.FOLLOW_25_in_declaration220)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if b is not None:
+ self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
+ else:
+ self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
+
+
+
+
+ elif alt9 == 2:
+ # C.g:175:4: s= declaration_specifiers (t= init_declarator_list )? e= ';'
+ self.following.append(self.FOLLOW_declaration_specifiers_in_declaration234)
+ s = self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:175:30: (t= init_declarator_list )?
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if (LA8_0 == IDENTIFIER or (58 <= LA8_0 <= 60) or LA8_0 == 62 or LA8_0 == 66) :
+ alt8 = 1
+ if alt8 == 1:
+ # C.g:0:0: t= init_declarator_list
+ self.following.append(self.FOLLOW_init_declarator_list_in_declaration238)
+ t = self.init_declarator_list()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ e = self.input.LT(1)
+ self.match(self.input, 25, self.FOLLOW_25_in_declaration243)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if t is not None:
+ self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 4, declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end declaration
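+ # Note the split above: the 'typedef' alternative records a
+ # TypedefDefinition spanning the 'typedef' keyword (a) through the ';'
+ # (d), while the plain alternative records a VariableDeclaration only
+ # when an init_declarator_list (t) is actually present.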
+
+ class declaration_specifiers_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start declaration_specifiers
+ # C.g:182:1: declaration_specifiers : ( storage_class_specifier | type_specifier | type_qualifier )+ ;
+ def declaration_specifiers(self, ):
+
+ retval = self.declaration_specifiers_return()
+ retval.start = self.input.LT(1)
+ declaration_specifiers_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 5):
+ return retval
+
+ # C.g:183:2: ( ( storage_class_specifier | type_specifier | type_qualifier )+ )
+ # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
+ # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
+ cnt10 = 0
+ while True: #loop10
+ alt10 = 4
+ LA10 = self.input.LA(1)
+ if LA10 == 58:
+ LA10_2 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == 59:
+ LA10_3 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == 60:
+ LA10_4 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == IDENTIFIER:
+ LA10_5 = self.input.LA(2)
+
+ if (self.synpred14()) :
+ alt10 = 2
+
+
+ elif LA10 == 53:
+ LA10_9 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == 29 or LA10 == 30 or LA10 == 31 or LA10 == 32 or LA10 == 33:
+ alt10 = 1
+ elif LA10 == 34 or LA10 == 35 or LA10 == 36 or LA10 == 37 or LA10 == 38 or LA10 == 39 or LA10 == 40 or LA10 == 41 or LA10 == 42 or LA10 == 45 or LA10 == 46 or LA10 == 48:
+ alt10 = 2
+ elif LA10 == 49 or LA10 == 50 or LA10 == 51 or LA10 == 52 or LA10 == 54 or LA10 == 55 or LA10 == 56 or LA10 == 57 or LA10 == 61:
+ alt10 = 3
+
+ if alt10 == 1:
+ # C.g:183:10: storage_class_specifier
+ self.following.append(self.FOLLOW_storage_class_specifier_in_declaration_specifiers264)
+ self.storage_class_specifier()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt10 == 2:
+ # C.g:184:7: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_declaration_specifiers272)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt10 == 3:
+ # C.g:185:13: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_declaration_specifiers286)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ if cnt10 >= 1:
+ break #loop10
+
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ eee = EarlyExitException(10, self.input)
+ raise eee
+
+ cnt10 += 1
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 5, declaration_specifiers_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end declaration_specifiers
+
+ class init_declarator_list_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start init_declarator_list
+ # C.g:189:1: init_declarator_list : init_declarator ( ',' init_declarator )* ;
+ def init_declarator_list(self, ):
+
+ retval = self.init_declarator_list_return()
+ retval.start = self.input.LT(1)
+ init_declarator_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 6):
+ return retval
+
+ # C.g:190:2: ( init_declarator ( ',' init_declarator )* )
+ # C.g:190:4: init_declarator ( ',' init_declarator )*
+ self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list308)
+ self.init_declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:190:20: ( ',' init_declarator )*
+ while True: #loop11
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if (LA11_0 == 27) :
+ alt11 = 1
+
+
+ if alt11 == 1:
+ # C.g:190:21: ',' init_declarator
+ self.match(self.input, 27, self.FOLLOW_27_in_init_declarator_list311)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list313)
+ self.init_declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop11
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 6, init_declarator_list_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end init_declarator_list
+
+
+ # $ANTLR start init_declarator
+ # C.g:193:1: init_declarator : declarator ( '=' initializer )? ;
+ def init_declarator(self, ):
+
+ init_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7):
+ return
+
+ # C.g:194:2: ( declarator ( '=' initializer )? )
+ # C.g:194:4: declarator ( '=' initializer )?
+ self.following.append(self.FOLLOW_declarator_in_init_declarator326)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:194:15: ( '=' initializer )?
+ alt12 = 2
+ LA12_0 = self.input.LA(1)
+
+ if (LA12_0 == 28) :
+ alt12 = 1
+ if alt12 == 1:
+ # C.g:194:16: '=' initializer
+ self.match(self.input, 28, self.FOLLOW_28_in_init_declarator329)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_initializer_in_init_declarator331)
+ self.initializer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 7, init_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end init_declarator
+
+
+ # $ANTLR start storage_class_specifier
+ # C.g:197:1: storage_class_specifier : ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' );
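+ # The range test below (29 <= LA(1) <= 33) is the generated form of a
+ # token-set match: token types 29..33 are exactly 'extern', 'static',
+ # 'auto', 'register' and 'STATIC' in tokenNames.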
+ def storage_class_specifier(self, ):
+
+ storage_class_specifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8):
+ return
+
+ # C.g:198:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' )
+ # C.g:
+ if (29 <= self.input.LA(1) <= 33):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_storage_class_specifier0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 8, storage_class_specifier_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end storage_class_specifier
+
+
+ # $ANTLR start type_specifier
+ # C.g:205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );
+ def type_specifier(self, ):
+
+ type_specifier_StartIndex = self.input.index()
+ s = None
+
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9):
+ return
+
+ # C.g:206:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id )
+ alt13 = 12
+ LA13_0 = self.input.LA(1)
+
+ if (LA13_0 == 34) :
+ alt13 = 1
+ elif (LA13_0 == 35) :
+ alt13 = 2
+ elif (LA13_0 == 36) :
+ alt13 = 3
+ elif (LA13_0 == 37) :
+ alt13 = 4
+ elif (LA13_0 == 38) :
+ alt13 = 5
+ elif (LA13_0 == 39) :
+ alt13 = 6
+ elif (LA13_0 == 40) :
+ alt13 = 7
+ elif (LA13_0 == 41) :
+ alt13 = 8
+ elif (LA13_0 == 42) :
+ alt13 = 9
+ elif ((45 <= LA13_0 <= 46)) :
+ alt13 = 10
+ elif (LA13_0 == 48) :
+ alt13 = 11
+ elif (LA13_0 == IDENTIFIER) and (self.synpred34()):
+ alt13 = 12
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input)
+
+ raise nvae
+
+ if alt13 == 1:
+ # C.g:206:4: 'void'
+ self.match(self.input, 34, self.FOLLOW_34_in_type_specifier376)
+ if self.failed:
+ return
+
+
+ elif alt13 == 2:
+ # C.g:207:4: 'char'
+ self.match(self.input, 35, self.FOLLOW_35_in_type_specifier381)
+ if self.failed:
+ return
+
+
+ elif alt13 == 3:
+ # C.g:208:4: 'short'
+ self.match(self.input, 36, self.FOLLOW_36_in_type_specifier386)
+ if self.failed:
+ return
+
+
+ elif alt13 == 4:
+ # C.g:209:4: 'int'
+ self.match(self.input, 37, self.FOLLOW_37_in_type_specifier391)
+ if self.failed:
+ return
+
+
+ elif alt13 == 5:
+ # C.g:210:4: 'long'
+ self.match(self.input, 38, self.FOLLOW_38_in_type_specifier396)
+ if self.failed:
+ return
+
+
+ elif alt13 == 6:
+ # C.g:211:4: 'float'
+ self.match(self.input, 39, self.FOLLOW_39_in_type_specifier401)
+ if self.failed:
+ return
+
+
+ elif alt13 == 7:
+ # C.g:212:4: 'double'
+ self.match(self.input, 40, self.FOLLOW_40_in_type_specifier406)
+ if self.failed:
+ return
+
+
+ elif alt13 == 8:
+ # C.g:213:4: 'signed'
+ self.match(self.input, 41, self.FOLLOW_41_in_type_specifier411)
+ if self.failed:
+ return
+
+
+ elif alt13 == 9:
+ # C.g:214:4: 'unsigned'
+ self.match(self.input, 42, self.FOLLOW_42_in_type_specifier416)
+ if self.failed:
+ return
+
+
+ elif alt13 == 10:
+ # C.g:215:4: s= struct_or_union_specifier
+ self.following.append(self.FOLLOW_struct_or_union_specifier_in_type_specifier423)
+ s = self.struct_or_union_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if s.stop is not None:
+ self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
+
+
+
+
+ elif alt13 == 11:
+ # C.g:220:4: e= enum_specifier
+ self.following.append(self.FOLLOW_enum_specifier_in_type_specifier433)
+ e = self.enum_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if e.stop is not None:
+ self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+
+ elif alt13 == 12:
+ # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )=> type_id
+ self.following.append(self.FOLLOW_type_id_in_type_specifier451)
+ self.type_id()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 9, type_specifier_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_specifier
+
+
+ # $ANTLR start type_id
+ # C.g:228:1: type_id : IDENTIFIER ;
+ def type_id(self, ):
+
+ type_id_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10):
+ return
+
+ # C.g:229:5: ( IDENTIFIER )
+ # C.g:229:9: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id467)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 10, type_id_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_id
+
+ class struct_or_union_specifier_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start struct_or_union_specifier
+ # C.g:233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );
+ def struct_or_union_specifier(self, ):
+
+ retval = self.struct_or_union_specifier_return()
+ retval.start = self.input.LT(1)
+ struct_or_union_specifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 11):
+ return retval
+
+ # C.g:235:2: ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER )
+ alt15 = 2
+ LA15_0 = self.input.LA(1)
+
+ if ((45 <= LA15_0 <= 46)) :
+ LA15_1 = self.input.LA(2)
+
+ if (LA15_1 == IDENTIFIER) :
+ LA15_2 = self.input.LA(3)
+
+ if (LA15_2 == 43) :
+ alt15 = 1
+ elif (LA15_2 == EOF or LA15_2 == IDENTIFIER or LA15_2 == 25 or LA15_2 == 27 or (29 <= LA15_2 <= 42) or (45 <= LA15_2 <= 64) or LA15_2 == 66) :
+ alt15 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 2, self.input)
+
+ raise nvae
+
+ elif (LA15_1 == 43) :
+ alt15 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 1, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 0, self.input)
+
+ raise nvae
+
+ if alt15 == 1:
+ # C.g:235:4: struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}'
+ self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier494)
+ self.struct_or_union()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:235:20: ( IDENTIFIER )?
+ alt14 = 2
+ LA14_0 = self.input.LA(1)
+
+ if (LA14_0 == IDENTIFIER) :
+ alt14 = 1
+ if alt14 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier496)
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 43, self.FOLLOW_43_in_struct_or_union_specifier499)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_struct_declaration_list_in_struct_or_union_specifier501)
+ self.struct_declaration_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, 44, self.FOLLOW_44_in_struct_or_union_specifier503)
+ if self.failed:
+ return retval
+
+
+ elif alt15 == 2:
+ # C.g:236:4: struct_or_union IDENTIFIER
+ self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier508)
+ self.struct_or_union()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier510)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 11, struct_or_union_specifier_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end struct_or_union_specifier
+
+
+ # $ANTLR start struct_or_union
+ # C.g:239:1: struct_or_union : ( 'struct' | 'union' );
+ def struct_or_union(self, ):
+
+ struct_or_union_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12):
+ return
+
+ # C.g:240:2: ( 'struct' | 'union' )
+ # C.g:
+ if (45 <= self.input.LA(1) <= 46):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_struct_or_union0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 12, struct_or_union_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_or_union
+
+
+ # $ANTLR start struct_declaration_list
+ # C.g:244:1: struct_declaration_list : ( struct_declaration )+ ;
+ def struct_declaration_list(self, ):
+
+ struct_declaration_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13):
+ return
+
+ # C.g:245:2: ( ( struct_declaration )+ )
+ # C.g:245:4: ( struct_declaration )+
+ # C.g:245:4: ( struct_declaration )+
+ cnt16 = 0
+ while True: #loop16
+ alt16 = 2
+ LA16_0 = self.input.LA(1)
+
+ if (LA16_0 == IDENTIFIER or (34 <= LA16_0 <= 42) or (45 <= LA16_0 <= 46) or (48 <= LA16_0 <= 61)) :
+ alt16 = 1
+
+
+ if alt16 == 1:
+ # C.g:0:0: struct_declaration
+ self.following.append(self.FOLLOW_struct_declaration_in_struct_declaration_list537)
+ self.struct_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt16 >= 1:
+ break #loop16
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(16, self.input)
+ raise eee
+
+ cnt16 += 1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 13, struct_declaration_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declaration_list
+
+
+ # $ANTLR start struct_declaration
+ # C.g:248:1: struct_declaration : specifier_qualifier_list struct_declarator_list ';' ;
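+    # Illustrative note (hand-written): a single member line such as
+    # "CONST UINT32 Signature;" (specifiers, declarator list, then ';').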
+ def struct_declaration(self, ):
+
+ struct_declaration_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14):
+ return
+
+ # C.g:249:2: ( specifier_qualifier_list struct_declarator_list ';' )
+ # C.g:249:4: specifier_qualifier_list struct_declarator_list ';'
+ self.following.append(self.FOLLOW_specifier_qualifier_list_in_struct_declaration549)
+ self.specifier_qualifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration551)
+ self.struct_declarator_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration553)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 14, struct_declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declaration
+
+
+ # $ANTLR start specifier_qualifier_list
+ # C.g:252:1: specifier_qualifier_list : ( type_qualifier | type_specifier )+ ;
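+    # Illustrative note (hand-written): one or more qualifiers/specifiers in
+    # any order, e.g. "CONST volatile UINT64"; the lookahead below relies on
+    # synpred39/synpred40 to pick between the two alternatives.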
+ def specifier_qualifier_list(self, ):
+
+ specifier_qualifier_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15):
+ return
+
+ # C.g:253:2: ( ( type_qualifier | type_specifier )+ )
+ # C.g:253:4: ( type_qualifier | type_specifier )+
+ # C.g:253:4: ( type_qualifier | type_specifier )+
+ cnt17 = 0
+ while True: #loop17
+ alt17 = 3
+ LA17 = self.input.LA(1)
+ if LA17 == 58:
+ LA17_2 = self.input.LA(2)
+
+ if (self.synpred39()) :
+ alt17 = 1
+
+
+ elif LA17 == 59:
+ LA17_3 = self.input.LA(2)
+
+ if (self.synpred39()) :
+ alt17 = 1
+
+
+ elif LA17 == 60:
+ LA17_4 = self.input.LA(2)
+
+ if (self.synpred39()) :
+ alt17 = 1
+
+
+ elif LA17 == IDENTIFIER:
+ LA17 = self.input.LA(2)
+ if LA17 == EOF or LA17 == IDENTIFIER or LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48 or LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 58 or LA17 == 59 or LA17 == 60 or LA17 == 61 or LA17 == 63 or LA17 == 66:
+ alt17 = 2
+ elif LA17 == 62:
+ LA17_94 = self.input.LA(3)
+
+ if (self.synpred40()) :
+ alt17 = 2
+
+
+ elif LA17 == 47:
+ LA17_95 = self.input.LA(3)
+
+ if (self.synpred40()) :
+ alt17 = 2
+
+
+ elif LA17 == 64:
+ LA17_96 = self.input.LA(3)
+
+ if (self.synpred40()) :
+ alt17 = 2
+
+
+
+ elif LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 61:
+ alt17 = 1
+ elif LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48:
+ alt17 = 2
+
+ if alt17 == 1:
+ # C.g:253:6: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_specifier_qualifier_list566)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt17 == 2:
+ # C.g:253:23: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_specifier_qualifier_list570)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt17 >= 1:
+ break #loop17
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(17, self.input)
+ raise eee
+
+ cnt17 += 1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 15, specifier_qualifier_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end specifier_qualifier_list
+
+
+ # $ANTLR start struct_declarator_list
+ # C.g:256:1: struct_declarator_list : struct_declarator ( ',' struct_declarator )* ;
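+    # Illustrative note (hand-written): comma-separated declarators on one
+    # member line, e.g. the "X, Y, Z" in "UINT32 X, Y, Z;".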
+ def struct_declarator_list(self, ):
+
+ struct_declarator_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16):
+ return
+
+ # C.g:257:2: ( struct_declarator ( ',' struct_declarator )* )
+ # C.g:257:4: struct_declarator ( ',' struct_declarator )*
+ self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list584)
+ self.struct_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:257:22: ( ',' struct_declarator )*
+ while True: #loop18
+ alt18 = 2
+ LA18_0 = self.input.LA(1)
+
+ if (LA18_0 == 27) :
+ alt18 = 1
+
+
+ if alt18 == 1:
+ # C.g:257:23: ',' struct_declarator
+ self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list587)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list589)
+ self.struct_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop18
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 16, struct_declarator_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declarator_list
+
+
+ # $ANTLR start struct_declarator
+ # C.g:260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );
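+    # Illustrative note (hand-written): a member declarator with an optional
+    # bit-field width, e.g. "Flags : 4", or a bare width such as ": 0".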
+ def struct_declarator(self, ):
+
+ struct_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17):
+ return
+
+ # C.g:261:2: ( declarator ( ':' constant_expression )? | ':' constant_expression )
+ alt20 = 2
+ LA20_0 = self.input.LA(1)
+
+ if (LA20_0 == IDENTIFIER or (58 <= LA20_0 <= 60) or LA20_0 == 62 or LA20_0 == 66) :
+ alt20 = 1
+ elif (LA20_0 == 47) :
+ alt20 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input)
+
+ raise nvae
+
+ if alt20 == 1:
+ # C.g:261:4: declarator ( ':' constant_expression )?
+ self.following.append(self.FOLLOW_declarator_in_struct_declarator602)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:261:15: ( ':' constant_expression )?
+ alt19 = 2
+ LA19_0 = self.input.LA(1)
+
+ if (LA19_0 == 47) :
+ alt19 = 1
+ if alt19 == 1:
+ # C.g:261:16: ':' constant_expression
+ self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator605)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_struct_declarator607)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt20 == 2:
+ # C.g:262:4: ':' constant_expression
+ self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator614)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_struct_declarator616)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 17, struct_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declarator
+
+ class enum_specifier_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start enum_specifier
+ # C.g:265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );
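+    # Illustrative note (hand-written) on the three alternatives:
+    #   enum { RED, GREEN, }       (anonymous body, trailing ',' allowed)
+    #   enum COLOR { RED, GREEN }  (named body)
+    #   enum COLOR                 (tag reference only)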
+ def enum_specifier(self, ):
+
+ retval = self.enum_specifier_return()
+ retval.start = self.input.LT(1)
+ enum_specifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 18):
+ return retval
+
+ # C.g:267:2: ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER )
+ alt23 = 3
+ LA23_0 = self.input.LA(1)
+
+ if (LA23_0 == 48) :
+ LA23_1 = self.input.LA(2)
+
+ if (LA23_1 == IDENTIFIER) :
+ LA23_2 = self.input.LA(3)
+
+ if (LA23_2 == 43) :
+ alt23 = 2
+ elif (LA23_2 == EOF or LA23_2 == IDENTIFIER or LA23_2 == 25 or LA23_2 == 27 or (29 <= LA23_2 <= 42) or (45 <= LA23_2 <= 64) or LA23_2 == 66) :
+ alt23 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 2, self.input)
+
+ raise nvae
+
+ elif (LA23_1 == 43) :
+ alt23 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 1, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 0, self.input)
+
+ raise nvae
+
+ if alt23 == 1:
+ # C.g:267:4: 'enum' '{' enumerator_list ( ',' )? '}'
+ self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier634)
+ if self.failed:
+ return retval
+ self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier636)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier638)
+ self.enumerator_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:267:31: ( ',' )?
+ alt21 = 2
+ LA21_0 = self.input.LA(1)
+
+ if (LA21_0 == 27) :
+ alt21 = 1
+ if alt21 == 1:
+ # C.g:0:0: ','
+ self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier640)
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier643)
+ if self.failed:
+ return retval
+
+
+ elif alt23 == 2:
+ # C.g:268:4: 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}'
+ self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier648)
+ if self.failed:
+ return retval
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier650)
+ if self.failed:
+ return retval
+ self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier652)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier654)
+ self.enumerator_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:268:42: ( ',' )?
+ alt22 = 2
+ LA22_0 = self.input.LA(1)
+
+ if (LA22_0 == 27) :
+ alt22 = 1
+ if alt22 == 1:
+ # C.g:0:0: ','
+ self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier656)
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier659)
+ if self.failed:
+ return retval
+
+
+ elif alt23 == 3:
+ # C.g:269:4: 'enum' IDENTIFIER
+ self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier664)
+ if self.failed:
+ return retval
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier666)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 18, enum_specifier_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end enum_specifier
+
+
+ # $ANTLR start enumerator_list
+ # C.g:272:1: enumerator_list : enumerator ( ',' enumerator )* ;
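+    # Illustrative note (hand-written): e.g. "RED, GREEN = 2, BLUE"; the loop
+    # only consumes a ',' when an IDENTIFIER follows, so a trailing comma is
+    # left for enum_specifier to match.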
+ def enumerator_list(self, ):
+
+ enumerator_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 19):
+ return
+
+ # C.g:273:2: ( enumerator ( ',' enumerator )* )
+ # C.g:273:4: enumerator ( ',' enumerator )*
+ self.following.append(self.FOLLOW_enumerator_in_enumerator_list677)
+ self.enumerator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:273:15: ( ',' enumerator )*
+ while True: #loop24
+ alt24 = 2
+ LA24_0 = self.input.LA(1)
+
+ if (LA24_0 == 27) :
+ LA24_1 = self.input.LA(2)
+
+ if (LA24_1 == IDENTIFIER) :
+ alt24 = 1
+
+
+
+
+ if alt24 == 1:
+ # C.g:273:16: ',' enumerator
+ self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list680)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_enumerator_in_enumerator_list682)
+ self.enumerator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop24
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 19, enumerator_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end enumerator_list
+
+
+ # $ANTLR start enumerator
+ # C.g:276:1: enumerator : IDENTIFIER ( '=' constant_expression )? ;
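+    # Illustrative note (hand-written): one enumerator with an optional
+    # initializer, e.g. "MAX_SIZE = 0x100".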
+ def enumerator(self, ):
+
+ enumerator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20):
+ return
+
+ # C.g:277:2: ( IDENTIFIER ( '=' constant_expression )? )
+ # C.g:277:4: IDENTIFIER ( '=' constant_expression )?
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator695)
+ if self.failed:
+ return
+ # C.g:277:15: ( '=' constant_expression )?
+ alt25 = 2
+ LA25_0 = self.input.LA(1)
+
+ if (LA25_0 == 28) :
+ alt25 = 1
+ if alt25 == 1:
+ # C.g:277:16: '=' constant_expression
+ self.match(self.input, 28, self.FOLLOW_28_in_enumerator698)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_enumerator700)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 20, enumerator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end enumerator
+
+
+ # $ANTLR start type_qualifier
+ # C.g:280:1: type_qualifier : ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' );
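+    # Illustrative note (hand-written): besides ISO C 'const'/'volatile',
+    # this accepts the EDK2/UEFI decoration keywords (IN, OUT, OPTIONAL,
+    # EFIAPI, ...) as a single token in the range 49..61.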
+ def type_qualifier(self, ):
+
+ type_qualifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21):
+ return
+
+ # C.g:281:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' )
+ # C.g:
+ if (49 <= self.input.LA(1) <= 61):
+                    self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_type_qualifier0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 21, type_qualifier_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_qualifier
+
+ class declarator_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start declarator
+ # C.g:296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );
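+    # Illustrative note (hand-written): e.g. "*Buffer" or "EFIAPI EntryPoint";
+    # the optional UEFI calling-convention keywords may precede the direct
+    # declarator, and a bare pointer is accepted as the second alternative.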
+ def declarator(self, ):
+
+ retval = self.declarator_return()
+ retval.start = self.input.LT(1)
+ declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 22):
+ return retval
+
+ # C.g:297:2: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer )
+ alt30 = 2
+ LA30_0 = self.input.LA(1)
+
+ if (LA30_0 == 66) :
+ LA30_1 = self.input.LA(2)
+
+ if (self.synpred66()) :
+ alt30 = 1
+ elif (True) :
+ alt30 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 1, self.input)
+
+ raise nvae
+
+ elif (LA30_0 == IDENTIFIER or (58 <= LA30_0 <= 60) or LA30_0 == 62) :
+ alt30 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 0, self.input)
+
+ raise nvae
+
+ if alt30 == 1:
+ # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
+ # C.g:297:4: ( pointer )?
+ alt26 = 2
+ LA26_0 = self.input.LA(1)
+
+ if (LA26_0 == 66) :
+ alt26 = 1
+ if alt26 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_declarator784)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ # C.g:297:13: ( 'EFIAPI' )?
+ alt27 = 2
+ LA27_0 = self.input.LA(1)
+
+ if (LA27_0 == 58) :
+ alt27 = 1
+ if alt27 == 1:
+ # C.g:297:14: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_declarator788)
+ if self.failed:
+ return retval
+
+
+
+ # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
+ alt28 = 2
+ LA28_0 = self.input.LA(1)
+
+ if (LA28_0 == 59) :
+ alt28 = 1
+ if alt28 == 1:
+ # C.g:297:26: 'EFI_BOOTSERVICE'
+ self.match(self.input, 59, self.FOLLOW_59_in_declarator793)
+ if self.failed:
+ return retval
+
+
+
+ # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
+ alt29 = 2
+ LA29_0 = self.input.LA(1)
+
+ if (LA29_0 == 60) :
+ alt29 = 1
+ if alt29 == 1:
+ # C.g:297:47: 'EFI_RUNTIMESERVICE'
+ self.match(self.input, 60, self.FOLLOW_60_in_declarator798)
+ if self.failed:
+ return retval
+
+
+
+ self.following.append(self.FOLLOW_direct_declarator_in_declarator802)
+ self.direct_declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt30 == 2:
+ # C.g:299:4: pointer
+ self.following.append(self.FOLLOW_pointer_in_declarator808)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 22, declarator_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end declarator
+
+
+ # $ANTLR start direct_declarator
+ # C.g:302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );
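+    # Illustrative note (hand-written): e.g. "Buffer[16]" (alt 1) or the
+    # function-pointer form "(EFIAPI *Notify)(VOID)" (alt 2, which requires
+    # at least one declarator_suffix after the closing ')').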
+ def direct_declarator(self, ):
+
+ direct_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23):
+ return
+
+ # C.g:303:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ )
+ alt34 = 2
+ LA34_0 = self.input.LA(1)
+
+ if (LA34_0 == IDENTIFIER) :
+ alt34 = 1
+ elif (LA34_0 == 62) :
+ alt34 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );", 34, 0, self.input)
+
+ raise nvae
+
+ if alt34 == 1:
+ # C.g:303:4: IDENTIFIER ( declarator_suffix )*
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator819)
+ if self.failed:
+ return
+ # C.g:303:15: ( declarator_suffix )*
+ while True: #loop31
+ alt31 = 2
+ LA31_0 = self.input.LA(1)
+
+ if (LA31_0 == 62) :
+ LA31 = self.input.LA(2)
+ if LA31 == 63:
+ LA31_30 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 58:
+ LA31_31 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 66:
+ LA31_32 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 59:
+ LA31_33 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 60:
+ LA31_34 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == IDENTIFIER:
+ LA31_35 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 29 or LA31 == 30 or LA31 == 31 or LA31 == 32 or LA31 == 33:
+ LA31_37 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 34:
+ LA31_38 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 35:
+ LA31_39 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 36:
+ LA31_40 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 37:
+ LA31_41 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 38:
+ LA31_42 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 39:
+ LA31_43 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 40:
+ LA31_44 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 41:
+ LA31_45 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 42:
+ LA31_46 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 45 or LA31 == 46:
+ LA31_47 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 48:
+ LA31_48 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 49 or LA31 == 50 or LA31 == 51 or LA31 == 52 or LA31 == 53 or LA31 == 54 or LA31 == 55 or LA31 == 56 or LA31 == 57 or LA31 == 61:
+ LA31_49 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+
+ elif (LA31_0 == 64) :
+ LA31 = self.input.LA(2)
+ if LA31 == 65:
+ LA31_51 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 62:
+ LA31_52 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == IDENTIFIER:
+ LA31_53 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == HEX_LITERAL:
+ LA31_54 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == OCTAL_LITERAL:
+ LA31_55 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == DECIMAL_LITERAL:
+ LA31_56 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == CHARACTER_LITERAL:
+ LA31_57 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == STRING_LITERAL:
+ LA31_58 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == FLOATING_POINT_LITERAL:
+ LA31_59 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 72:
+ LA31_60 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 73:
+ LA31_61 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 66 or LA31 == 68 or LA31 == 69 or LA31 == 77 or LA31 == 78 or LA31 == 79:
+ LA31_62 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 74:
+ LA31_63 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+
+
+
+ if alt31 == 1:
+ # C.g:0:0: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator821)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop31
+
+
+
+
+ elif alt34 == 2:
+ # C.g:304:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+
+ self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator827)
+ if self.failed:
+ return
+ # C.g:304:8: ( 'EFIAPI' )?
+ alt32 = 2
+ LA32_0 = self.input.LA(1)
+
+ if (LA32_0 == 58) :
+ LA32_1 = self.input.LA(2)
+
+ if (self.synpred69()) :
+ alt32 = 1
+ if alt32 == 1:
+ # C.g:304:9: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator830)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_declarator_in_direct_declarator834)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_direct_declarator836)
+ if self.failed:
+ return
+ # C.g:304:35: ( declarator_suffix )+
+ cnt33 = 0
+ while True: #loop33
+ alt33 = 2
+ LA33_0 = self.input.LA(1)
+
+ if (LA33_0 == 62) :
+ LA33 = self.input.LA(2)
+ if LA33 == 63:
+ LA33_30 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 58:
+ LA33_31 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 66:
+ LA33_32 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 59:
+ LA33_33 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 60:
+ LA33_34 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == IDENTIFIER:
+ LA33_35 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 29 or LA33 == 30 or LA33 == 31 or LA33 == 32 or LA33 == 33:
+ LA33_37 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 34:
+ LA33_38 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 35:
+ LA33_39 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 36:
+ LA33_40 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 37:
+ LA33_41 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 38:
+ LA33_42 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 39:
+ LA33_43 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 40:
+ LA33_44 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 41:
+ LA33_45 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 42:
+ LA33_46 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 45 or LA33 == 46:
+ LA33_47 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 48:
+ LA33_48 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 49 or LA33 == 50 or LA33 == 51 or LA33 == 52 or LA33 == 53 or LA33 == 54 or LA33 == 55 or LA33 == 56 or LA33 == 57 or LA33 == 61:
+ LA33_49 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+
+ elif (LA33_0 == 64) :
+ LA33 = self.input.LA(2)
+ if LA33 == 65:
+ LA33_51 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 62:
+ LA33_52 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == IDENTIFIER:
+ LA33_53 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == HEX_LITERAL:
+ LA33_54 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == OCTAL_LITERAL:
+ LA33_55 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == DECIMAL_LITERAL:
+ LA33_56 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == CHARACTER_LITERAL:
+ LA33_57 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == STRING_LITERAL:
+ LA33_58 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == FLOATING_POINT_LITERAL:
+ LA33_59 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 72:
+ LA33_60 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 73:
+ LA33_61 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 66 or LA33 == 68 or LA33 == 69 or LA33 == 77 or LA33 == 78 or LA33 == 79:
+ LA33_62 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 74:
+ LA33_63 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+
+
+
+ if alt33 == 1:
+ # C.g:0:0: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator838)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt33 >= 1:
+ break #loop33
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(33, self.input)
+ raise eee
+
+ cnt33 += 1
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 23, direct_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end direct_declarator
+
+
+ # $ANTLR start declarator_suffix
+ # C.g:307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );
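+    # Illustrative note (hand-written): the five alternatives match, e.g.
+    #   [16]    []    (UINTN Count)    (A, B)    ()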
+ def declarator_suffix(self, ):
+
+ declarator_suffix_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24):
+ return
+
+ # C.g:308:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' )
+ alt35 = 5
+ LA35_0 = self.input.LA(1)
+
+ if (LA35_0 == 64) :
+ LA35_1 = self.input.LA(2)
+
+ if (LA35_1 == 65) :
+ alt35 = 2
+ elif ((IDENTIFIER <= LA35_1 <= FLOATING_POINT_LITERAL) or LA35_1 == 62 or LA35_1 == 66 or (68 <= LA35_1 <= 69) or (72 <= LA35_1 <= 74) or (77 <= LA35_1 <= 79)) :
+ alt35 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input)
+
+ raise nvae
+
+ elif (LA35_0 == 62) :
+ LA35 = self.input.LA(2)
+ if LA35 == 63:
+ alt35 = 5
+ elif LA35 == 29 or LA35 == 30 or LA35 == 31 or LA35 == 32 or LA35 == 33 or LA35 == 34 or LA35 == 35 or LA35 == 36 or LA35 == 37 or LA35 == 38 or LA35 == 39 or LA35 == 40 or LA35 == 41 or LA35 == 42 or LA35 == 45 or LA35 == 46 or LA35 == 48 or LA35 == 49 or LA35 == 50 or LA35 == 51 or LA35 == 52 or LA35 == 53 or LA35 == 54 or LA35 == 55 or LA35 == 56 or LA35 == 57 or LA35 == 58 or LA35 == 59 or LA35 == 60 or LA35 == 61 or LA35 == 66:
+ alt35 = 3
+ elif LA35 == IDENTIFIER:
+ LA35_29 = self.input.LA(3)
+
+ if (self.synpred73()) :
+ alt35 = 3
+ elif (self.synpred74()) :
+ alt35 = 4
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 29, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input)
+
+ raise nvae
+
+ if alt35 == 1:
+ # C.g:308:6: '[' constant_expression ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix852)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix854)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix856)
+ if self.failed:
+ return
+
+
+ elif alt35 == 2:
+ # C.g:309:9: '[' ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix866)
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix868)
+ if self.failed:
+ return
+
+
+ elif alt35 == 3:
+ # C.g:310:9: '(' parameter_type_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix878)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix880)
+ self.parameter_type_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix882)
+ if self.failed:
+ return
+
+
+ elif alt35 == 4:
+ # C.g:311:9: '(' identifier_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix892)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix894)
+ self.identifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix896)
+ if self.failed:
+ return
+
+
+ elif alt35 == 5:
+ # C.g:312:9: '(' ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix906)
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix908)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 24, declarator_suffix_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end declarator_suffix
+
+
+ # $ANTLR start pointer
+ # C.g:315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );
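+    # Illustrative note (hand-written): e.g. "* CONST" (alt 1), "**" (alt 2),
+    # or a plain "*" (alt 3); synpred77/synpred78 disambiguate below.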
+ def pointer(self, ):
+
+ pointer_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25):
+ return
+
+ # C.g:316:2: ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
+ alt38 = 3
+ LA38_0 = self.input.LA(1)
+
+ if (LA38_0 == 66) :
+ LA38 = self.input.LA(2)
+ if LA38 == 66:
+ LA38_2 = self.input.LA(3)
+
+ if (self.synpred78()) :
+ alt38 = 2
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input)
+
+ raise nvae
+
+ elif LA38 == 58:
+ LA38_3 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input)
+
+ raise nvae
+
+ elif LA38 == 59:
+ LA38_4 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input)
+
+ raise nvae
+
+ elif LA38 == 60:
+ LA38_5 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 5, self.input)
+
+ raise nvae
+
+ elif LA38 == EOF or LA38 == IDENTIFIER or LA38 == 25 or LA38 == 26 or LA38 == 27 or LA38 == 28 or LA38 == 29 or LA38 == 30 or LA38 == 31 or LA38 == 32 or LA38 == 33 or LA38 == 34 or LA38 == 35 or LA38 == 36 or LA38 == 37 or LA38 == 38 or LA38 == 39 or LA38 == 40 or LA38 == 41 or LA38 == 42 or LA38 == 43 or LA38 == 45 or LA38 == 46 or LA38 == 47 or LA38 == 48 or LA38 == 62 or LA38 == 63 or LA38 == 64:
+ alt38 = 3
+ elif LA38 == 53:
+ LA38_21 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 21, self.input)
+
+ raise nvae
+
+ elif LA38 == 49 or LA38 == 50 or LA38 == 51 or LA38 == 52 or LA38 == 54 or LA38 == 55 or LA38 == 56 or LA38 == 57 or LA38 == 61:
+ LA38_29 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input)
+
+ raise nvae
+
+ if alt38 == 1:
+ # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
+ self.match(self.input, 66, self.FOLLOW_66_in_pointer919)
+ if self.failed:
+ return
+ # C.g:316:8: ( type_qualifier )+
+ cnt36 = 0
+ while True: #loop36
+ alt36 = 2
+ LA36 = self.input.LA(1)
+ if LA36 == 58:
+ LA36_2 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 59:
+ LA36_3 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 60:
+ LA36_4 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 53:
+ LA36_20 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 49 or LA36 == 50 or LA36 == 51 or LA36 == 52 or LA36 == 54 or LA36 == 55 or LA36 == 56 or LA36 == 57 or LA36 == 61:
+ LA36_28 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+
+ if alt36 == 1:
+ # C.g:0:0: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_pointer921)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt36 >= 1:
+ break #loop36
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(36, self.input)
+ raise eee
+
+ cnt36 += 1
+
+
+ # C.g:316:24: ( pointer )?
+ alt37 = 2
+ LA37_0 = self.input.LA(1)
+
+ if (LA37_0 == 66) :
+ LA37_1 = self.input.LA(2)
+
+ if (self.synpred76()) :
+ alt37 = 1
+ if alt37 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_pointer924)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt38 == 2:
+ # C.g:317:4: '*' pointer
+ self.match(self.input, 66, self.FOLLOW_66_in_pointer930)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_pointer_in_pointer932)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt38 == 3:
+ # C.g:318:4: '*'
+ self.match(self.input, 66, self.FOLLOW_66_in_pointer937)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 25, pointer_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end pointer
+
+
+ # $ANTLR start parameter_type_list
+ # C.g:321:1: parameter_type_list : parameter_list ( ',' ( 'OPTIONAL' )? '...' )? ;
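+    # Illustrative note (hand-written): a parameter list with an optional
+    # variadic tail, e.g. "CONST CHAR8 *Format, ..."; the EDK2 'OPTIONAL'
+    # keyword may appear between the ',' and the '...'.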
+ def parameter_type_list(self, ):
+
+ parameter_type_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26):
+ return
+
+ # C.g:322:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? )
+ # C.g:322:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )?
+ self.following.append(self.FOLLOW_parameter_list_in_parameter_type_list948)
+ self.parameter_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:322:19: ( ',' ( 'OPTIONAL' )? '...' )?
+ alt40 = 2
+ LA40_0 = self.input.LA(1)
+
+ if (LA40_0 == 27) :
+ alt40 = 1
+ if alt40 == 1:
+ # C.g:322:20: ',' ( 'OPTIONAL' )? '...'
+ self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list951)
+ if self.failed:
+ return
+ # C.g:322:24: ( 'OPTIONAL' )?
+ alt39 = 2
+ LA39_0 = self.input.LA(1)
+
+ if (LA39_0 == 53) :
+ alt39 = 1
+ if alt39 == 1:
+ # C.g:322:25: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list954)
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 67, self.FOLLOW_67_in_parameter_type_list958)
+ if self.failed:
+ return
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 26, parameter_type_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end parameter_type_list
+
+
+ # $ANTLR start parameter_list
+ # C.g:325:1: parameter_list : parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* ;
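+    # Illustrative note (hand-written): the separating ',' may be followed by
+    # the EDK2 'OPTIONAL' keyword, e.g. "IN UINTN Size, OPTIONAL VOID *Data".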
+ def parameter_list(self, ):
+
+ parameter_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27):
+ return
+
+ # C.g:326:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* )
+ # C.g:326:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )*
+ self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list971)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:326:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )*
+ while True: #loop42
+ alt42 = 2
+ LA42_0 = self.input.LA(1)
+
+ if (LA42_0 == 27) :
+ LA42_1 = self.input.LA(2)
+
+ if (LA42_1 == 53) :
+ LA42_3 = self.input.LA(3)
+
+ if (self.synpred82()) :
+ alt42 = 1
+
+
+ elif (LA42_1 == IDENTIFIER or (29 <= LA42_1 <= 42) or (45 <= LA42_1 <= 46) or (48 <= LA42_1 <= 52) or (54 <= LA42_1 <= 61) or LA42_1 == 66) :
+ alt42 = 1
+
+
+
+
+ if alt42 == 1:
+ # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
+ self.match(self.input, 27, self.FOLLOW_27_in_parameter_list974)
+ if self.failed:
+ return
+ # C.g:326:31: ( 'OPTIONAL' )?
+ alt41 = 2
+ LA41_0 = self.input.LA(1)
+
+ if (LA41_0 == 53) :
+ LA41_1 = self.input.LA(2)
+
+ if (self.synpred81()) :
+ alt41 = 1
+ if alt41 == 1:
+ # C.g:326:32: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_parameter_list977)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list981)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop42
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 27, parameter_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end parameter_list
+
+
+ # $ANTLR start parameter_declaration
+ # C.g:329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );
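+    # Illustrative note (hand-written): e.g. "IN UINT32 Flags OPTIONAL"
+    # (alt 1) or a pointer-to-name such as "**Handle" (alt 2).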
+ def parameter_declaration(self, ):
+
+ parameter_declaration_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28):
+ return
+
+ # C.g:330:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER )
+ alt46 = 2
+ LA46 = self.input.LA(1)
+ if LA46 == 29 or LA46 == 30 or LA46 == 31 or LA46 == 32 or LA46 == 33 or LA46 == 34 or LA46 == 35 or LA46 == 36 or LA46 == 37 or LA46 == 38 or LA46 == 39 or LA46 == 40 or LA46 == 41 or LA46 == 42 or LA46 == 45 or LA46 == 46 or LA46 == 48 or LA46 == 49 or LA46 == 50 or LA46 == 51 or LA46 == 52 or LA46 == 53 or LA46 == 54 or LA46 == 55 or LA46 == 56 or LA46 == 57 or LA46 == 58 or LA46 == 59 or LA46 == 60 or LA46 == 61:
+ alt46 = 1
+ elif LA46 == IDENTIFIER:
+ LA46_13 = self.input.LA(2)
+
+ if (self.synpred86()) :
+ alt46 = 1
+ elif (True) :
+ alt46 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input)
+
+ raise nvae
+
+ elif LA46 == 66:
+ alt46 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input)
+
+ raise nvae
+
+ if alt46 == 1:
+ # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
+ self.following.append(self.FOLLOW_declaration_specifiers_in_parameter_declaration994)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:330:27: ( declarator | abstract_declarator )*
+ while True: #loop43
+ alt43 = 3
+ LA43 = self.input.LA(1)
+ if LA43 == 66:
+ LA43_5 = self.input.LA(2)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == IDENTIFIER or LA43 == 58 or LA43 == 59 or LA43 == 60:
+ alt43 = 1
+ elif LA43 == 62:
+ LA43 = self.input.LA(2)
+ if LA43 == 29 or LA43 == 30 or LA43 == 31 or LA43 == 32 or LA43 == 33 or LA43 == 34 or LA43 == 35 or LA43 == 36 or LA43 == 37 or LA43 == 38 or LA43 == 39 or LA43 == 40 or LA43 == 41 or LA43 == 42 or LA43 == 45 or LA43 == 46 or LA43 == 48 or LA43 == 49 or LA43 == 50 or LA43 == 51 or LA43 == 52 or LA43 == 53 or LA43 == 54 or LA43 == 55 or LA43 == 56 or LA43 == 57 or LA43 == 61 or LA43 == 63 or LA43 == 64:
+ alt43 = 2
+ elif LA43 == IDENTIFIER:
+ LA43_37 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 58:
+ LA43_38 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 66:
+ LA43_39 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 59:
+ LA43_40 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 60:
+ LA43_41 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 62:
+ LA43_43 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+
+ elif LA43 == 64:
+ alt43 = 2
+
+ if alt43 == 1:
+ # C.g:330:28: declarator
+ self.following.append(self.FOLLOW_declarator_in_parameter_declaration997)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt43 == 2:
+ # C.g:330:39: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_parameter_declaration999)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop43
+
+
+ # C.g:330:61: ( 'OPTIONAL' )?
+ alt44 = 2
+ LA44_0 = self.input.LA(1)
+
+ if (LA44_0 == 53) :
+ alt44 = 1
+ if alt44 == 1:
+ # C.g:330:62: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration1004)
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt46 == 2:
+ # C.g:332:4: ( pointer )* IDENTIFIER
+ # C.g:332:4: ( pointer )*
+ while True: #loop45
+ alt45 = 2
+ LA45_0 = self.input.LA(1)
+
+ if (LA45_0 == 66) :
+ alt45 = 1
+
+
+ if alt45 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_parameter_declaration1013)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop45
+
+
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1016)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 28, parameter_declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end parameter_declaration
+
+
+ # $ANTLR start identifier_list
+ # C.g:335:1: identifier_list : IDENTIFIER ( ',' IDENTIFIER )* ;
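+    # Illustrative note (hand-written): a K&R-style name list, e.g. "A, B, C".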
+ def identifier_list(self, ):
+
+ identifier_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29):
+ return
+
+ # C.g:336:2: ( IDENTIFIER ( ',' IDENTIFIER )* )
+ # C.g:336:4: IDENTIFIER ( ',' IDENTIFIER )*
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1027)
+ if self.failed:
+ return
+ # C.g:337:2: ( ',' IDENTIFIER )*
+ while True: #loop47
+ alt47 = 2
+ LA47_0 = self.input.LA(1)
+
+ if (LA47_0 == 27) :
+ alt47 = 1
+
+
+ if alt47 == 1:
+ # C.g:337:3: ',' IDENTIFIER
+ self.match(self.input, 27, self.FOLLOW_27_in_identifier_list1031)
+ if self.failed:
+ return
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1033)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop47
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 29, identifier_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end identifier_list
+
+
+ # $ANTLR start type_name
+ # C.g:340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );
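+    # Illustrative note (hand-written): a type as written in a cast or sizeof,
+    # e.g. "CONST UINT8 *" (specifiers plus an optional abstract declarator).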
+ def type_name(self, ):
+
+ type_name_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30):
+ return
+
+ # C.g:341:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id )
+ alt49 = 2
+ LA49_0 = self.input.LA(1)
+
+ if ((34 <= LA49_0 <= 42) or (45 <= LA49_0 <= 46) or (48 <= LA49_0 <= 61)) :
+ alt49 = 1
+ elif (LA49_0 == IDENTIFIER) :
+ LA49_13 = self.input.LA(2)
+
+ if (self.synpred90()) :
+ alt49 = 1
+ elif (True) :
+ alt49 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input)
+
+ raise nvae
+
+ if alt49 == 1:
+ # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
+ self.following.append(self.FOLLOW_specifier_qualifier_list_in_type_name1046)
+ self.specifier_qualifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:341:29: ( abstract_declarator )?
+ alt48 = 2
+ LA48_0 = self.input.LA(1)
+
+ if (LA48_0 == 62 or LA48_0 == 64 or LA48_0 == 66) :
+ alt48 = 1
+ if alt48 == 1:
+ # C.g:0:0: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_type_name1048)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt49 == 2:
+ # C.g:342:4: type_id
+ self.following.append(self.FOLLOW_type_id_in_type_name1054)
+ self.type_id()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 30, type_name_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_name
+
+
+ # $ANTLR start abstract_declarator
+ # C.g:345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );
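+    # Illustrative note (hand-written): a declarator with no identifier, e.g.
+    # the "*[4]" in "sizeof (UINT8 *[4])".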
+ def abstract_declarator(self, ):
+
+ abstract_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31):
+ return
+
+ # C.g:346:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator )
+ alt51 = 2
+ LA51_0 = self.input.LA(1)
+
+ if (LA51_0 == 66) :
+ alt51 = 1
+ elif (LA51_0 == 62 or LA51_0 == 64) :
+ alt51 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input)
+
+ raise nvae
+
+ if alt51 == 1:
+ # C.g:346:4: pointer ( direct_abstract_declarator )?
+ self.following.append(self.FOLLOW_pointer_in_abstract_declarator1065)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:346:12: ( direct_abstract_declarator )?
+ alt50 = 2
+ LA50_0 = self.input.LA(1)
+
+ if (LA50_0 == 62) :
+ LA50 = self.input.LA(2)
+ if LA50 == 63:
+ LA50_12 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 58:
+ LA50_13 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 66:
+ LA50_14 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 59:
+ LA50_15 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 60:
+ LA50_16 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == IDENTIFIER:
+ LA50_17 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 62:
+ LA50_18 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 64:
+ LA50_19 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 29 or LA50 == 30 or LA50 == 31 or LA50 == 32 or LA50 == 33:
+ LA50_20 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 34:
+ LA50_21 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 35:
+ LA50_22 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 36:
+ LA50_23 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 37:
+ LA50_24 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 38:
+ LA50_25 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 39:
+ LA50_26 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 40:
+ LA50_27 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 41:
+ LA50_28 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 42:
+ LA50_29 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 45 or LA50 == 46:
+ LA50_30 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 48:
+ LA50_31 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 49 or LA50 == 50 or LA50 == 51 or LA50 == 52 or LA50 == 53 or LA50 == 54 or LA50 == 55 or LA50 == 56 or LA50 == 57 or LA50 == 61:
+ LA50_32 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif (LA50_0 == 64) :
+ LA50 = self.input.LA(2)
+ if LA50 == 65:
+ LA50_33 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 62:
+ LA50_34 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == IDENTIFIER:
+ LA50_35 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == HEX_LITERAL:
+ LA50_36 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == OCTAL_LITERAL:
+ LA50_37 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == DECIMAL_LITERAL:
+ LA50_38 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == CHARACTER_LITERAL:
+ LA50_39 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == STRING_LITERAL:
+ LA50_40 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == FLOATING_POINT_LITERAL:
+ LA50_41 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 72:
+ LA50_42 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 73:
+ LA50_43 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 66 or LA50 == 68 or LA50 == 69 or LA50 == 77 or LA50 == 78 or LA50 == 79:
+ LA50_44 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 74:
+ LA50_45 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ if alt50 == 1:
+ # C.g:0:0: direct_abstract_declarator
+ self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1067)
+ self.direct_abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt51 == 2:
+ # C.g:347:4: direct_abstract_declarator
+ self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1073)
+ self.direct_abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 31, abstract_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end abstract_declarator
+
+
+ # $ANTLR start direct_abstract_declarator
+ # C.g:350:1: direct_abstract_declarator : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* ;
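+    # Illustrative note (hand-written): e.g. "(*)(VOID)", a parenthesized
+    # abstract declarator optionally followed by further suffixes.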
+ def direct_abstract_declarator(self, ):
+
+ direct_abstract_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32):
+ return
+
+ # C.g:351:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* )
+ # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )*
+ # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )
+ alt52 = 2
+ LA52_0 = self.input.LA(1)
+
+ if (LA52_0 == 62) :
+ LA52 = self.input.LA(2)
+ if LA52 == IDENTIFIER or LA52 == 29 or LA52 == 30 or LA52 == 31 or LA52 == 32 or LA52 == 33 or LA52 == 34 or LA52 == 35 or LA52 == 36 or LA52 == 37 or LA52 == 38 or LA52 == 39 or LA52 == 40 or LA52 == 41 or LA52 == 42 or LA52 == 45 or LA52 == 46 or LA52 == 48 or LA52 == 49 or LA52 == 50 or LA52 == 51 or LA52 == 52 or LA52 == 53 or LA52 == 54 or LA52 == 55 or LA52 == 56 or LA52 == 57 or LA52 == 58 or LA52 == 59 or LA52 == 60 or LA52 == 61 or LA52 == 63:
+ alt52 = 2
+ elif LA52 == 66:
+ LA52_18 = self.input.LA(3)
+
+ if (self.synpred93()) :
+ alt52 = 1
+ elif (True) :
+ alt52 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input)
+
+ raise nvae
+
+ elif LA52 == 62 or LA52 == 64:
+ alt52 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input)
+
+ raise nvae
+
+ elif (LA52_0 == 64) :
+ alt52 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input)
+
+ raise nvae
+
+ if alt52 == 1:
+ # C.g:351:6: '(' abstract_declarator ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1086)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1088)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_direct_abstract_declarator1090)
+ if self.failed:
+ return
+
+
+ elif alt52 == 2:
+ # C.g:351:36: abstract_declarator_suffix
+ self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094)
+ self.abstract_declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:351:65: ( abstract_declarator_suffix )*
+ while True: #loop53
+ alt53 = 2
+ LA53_0 = self.input.LA(1)
+
+ if (LA53_0 == 62) :
+ LA53 = self.input.LA(2)
+ if LA53 == 63:
+ LA53_12 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 58:
+ LA53_13 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 66:
+ LA53_14 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 59:
+ LA53_15 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 60:
+ LA53_16 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == IDENTIFIER:
+ LA53_17 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 29 or LA53 == 30 or LA53 == 31 or LA53 == 32 or LA53 == 33:
+ LA53_19 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 34:
+ LA53_20 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 35:
+ LA53_21 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 36:
+ LA53_22 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 37:
+ LA53_23 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 38:
+ LA53_24 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 39:
+ LA53_25 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 40:
+ LA53_26 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 41:
+ LA53_27 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 42:
+ LA53_28 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 45 or LA53 == 46:
+ LA53_29 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 48:
+ LA53_30 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 49 or LA53 == 50 or LA53 == 51 or LA53 == 52 or LA53 == 53 or LA53 == 54 or LA53 == 55 or LA53 == 56 or LA53 == 57 or LA53 == 61:
+ LA53_31 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+
+ elif (LA53_0 == 64) :
+ LA53 = self.input.LA(2)
+ if LA53 == 65:
+ LA53_33 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 62:
+ LA53_34 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == IDENTIFIER:
+ LA53_35 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == HEX_LITERAL:
+ LA53_36 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == OCTAL_LITERAL:
+ LA53_37 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == DECIMAL_LITERAL:
+ LA53_38 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == CHARACTER_LITERAL:
+ LA53_39 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == STRING_LITERAL:
+ LA53_40 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == FLOATING_POINT_LITERAL:
+ LA53_41 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 72:
+ LA53_42 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 73:
+ LA53_43 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 66 or LA53 == 68 or LA53 == 69 or LA53 == 77 or LA53 == 78 or LA53 == 79:
+ LA53_44 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 74:
+ LA53_45 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+
+
+
+ if alt53 == 1:
+ # C.g:0:0: abstract_declarator_suffix
+ self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098)
+ self.abstract_declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop53
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 32, direct_abstract_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end direct_abstract_declarator
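+
+ # NOTE: "while True: #loopNN" blocks implement (...)* closures: altNN
+ # defaults to the exit branch (2) and is switched to 1 only when the
+ # cached lookahead plus the synpred agree that another iteration matches;
+ # otherwise the "break #loopNN" arm ends the loop.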
+
+
+ # $ANTLR start abstract_declarator_suffix
+ # C.g:354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );
+ def abstract_declarator_suffix(self, ):
+
+ abstract_declarator_suffix_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33):
+ return
+
+ # C.g:355:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' )
+ alt54 = 4
+ LA54_0 = self.input.LA(1)
+
+ if (LA54_0 == 64) :
+ LA54_1 = self.input.LA(2)
+
+ if (LA54_1 == 65) :
+ alt54 = 1
+ elif ((IDENTIFIER <= LA54_1 <= FLOATING_POINT_LITERAL) or LA54_1 == 62 or LA54_1 == 66 or (68 <= LA54_1 <= 69) or (72 <= LA54_1 <= 74) or (77 <= LA54_1 <= 79)) :
+ alt54 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 1, self.input)
+
+ raise nvae
+
+ elif (LA54_0 == 62) :
+ LA54_2 = self.input.LA(2)
+
+ if (LA54_2 == 63) :
+ alt54 = 3
+ elif (LA54_2 == IDENTIFIER or (29 <= LA54_2 <= 42) or (45 <= LA54_2 <= 46) or (48 <= LA54_2 <= 61) or LA54_2 == 66) :
+ alt54 = 4
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input)
+
+ raise nvae
+
+ if alt54 == 1:
+ # C.g:355:4: '[' ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1110)
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1112)
+ if self.failed:
+ return
+
+
+ elif alt54 == 2:
+ # C.g:356:4: '[' constant_expression ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1117)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1119)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1121)
+ if self.failed:
+ return
+
+
+ elif alt54 == 3:
+ # C.g:357:4: '(' ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1126)
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1128)
+ if self.failed:
+ return
+
+
+ elif alt54 == 4:
+ # C.g:358:4: '(' parameter_type_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1133)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135)
+ self.parameter_type_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1137)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 33, abstract_declarator_suffix_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end abstract_declarator_suffix
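+
+ # NOTE: the match() calls above pin down the token numbering used
+ # throughout this decision code: 62 = '(', 63 = ')', 64 = '[', 65 = ']'
+ # (compare each alternative's C.g comment with the token it matches).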
+
+
+ # $ANTLR start initializer
+ # C.g:361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );
+ def initializer(self, ):
+
+ initializer_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34):
+ return
+
+ # C.g:363:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' )
+ alt56 = 2
+ LA56_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA56_0 <= FLOATING_POINT_LITERAL) or LA56_0 == 62 or LA56_0 == 66 or (68 <= LA56_0 <= 69) or (72 <= LA56_0 <= 74) or (77 <= LA56_0 <= 79)) :
+ alt56 = 1
+ elif (LA56_0 == 43) :
+ alt56 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input)
+
+ raise nvae
+
+ if alt56 == 1:
+ # C.g:363:4: assignment_expression
+ self.following.append(self.FOLLOW_assignment_expression_in_initializer1150)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt56 == 2:
+ # C.g:364:4: '{' initializer_list ( ',' )? '}'
+ self.match(self.input, 43, self.FOLLOW_43_in_initializer1155)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_initializer_list_in_initializer1157)
+ self.initializer_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:364:25: ( ',' )?
+ alt55 = 2
+ LA55_0 = self.input.LA(1)
+
+ if (LA55_0 == 27) :
+ alt55 = 1
+ if alt55 == 1:
+ # C.g:0:0: ','
+ self.match(self.input, 27, self.FOLLOW_27_in_initializer1159)
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_initializer1162)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 34, initializer_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end initializer
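+
+ # NOTE: the ( ',' )? subrule (alt55, token 27) accepts a C-style trailing
+ # comma inside braced initializers, e.g. "{1, 2, 3,}".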
+
+
+ # $ANTLR start initializer_list
+ # C.g:367:1: initializer_list : initializer ( ',' initializer )* ;
+ def initializer_list(self, ):
+
+ initializer_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35):
+ return
+
+ # C.g:368:2: ( initializer ( ',' initializer )* )
+ # C.g:368:4: initializer ( ',' initializer )*
+ self.following.append(self.FOLLOW_initializer_in_initializer_list1173)
+ self.initializer()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:368:16: ( ',' initializer )*
+ while True: #loop57
+ alt57 = 2
+ LA57_0 = self.input.LA(1)
+
+ if (LA57_0 == 27) :
+ LA57_1 = self.input.LA(2)
+
+ if ((IDENTIFIER <= LA57_1 <= FLOATING_POINT_LITERAL) or LA57_1 == 43 or LA57_1 == 62 or LA57_1 == 66 or (68 <= LA57_1 <= 69) or (72 <= LA57_1 <= 74) or (77 <= LA57_1 <= 79)) :
+ alt57 = 1
+
+
+
+
+ if alt57 == 1:
+ # C.g:368:17: ',' initializer
+ self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1176)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_initializer_in_initializer_list1178)
+ self.initializer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop57
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 35, initializer_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end initializer_list
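+
+ # NOTE: loop57 takes a ',' only when LA(2) can start an initializer; a
+ # ',' followed by '}' (token 44) is left for the enclosing initializer
+ # rule's optional trailing comma.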
+
+ class argument_expression_list_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start argument_expression_list
+ # C.g:373:1: argument_expression_list : assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* ;
+ def argument_expression_list(self, ):
+
+ retval = self.argument_expression_list_return()
+ retval.start = self.input.LT(1)
+ argument_expression_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 36):
+ return retval
+
+ # C.g:374:2: ( assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* )
+ # C.g:374:6: assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )*
+ self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1196)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:374:28: ( 'OPTIONAL' )?
+ alt58 = 2
+ LA58_0 = self.input.LA(1)
+
+ if (LA58_0 == 53) :
+ alt58 = 1
+ if alt58 == 1:
+ # C.g:374:29: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1199)
+ if self.failed:
+ return retval
+
+
+
+ # C.g:374:42: ( ',' assignment_expression ( 'OPTIONAL' )? )*
+ while True: #loop60
+ alt60 = 2
+ LA60_0 = self.input.LA(1)
+
+ if (LA60_0 == 27) :
+ alt60 = 1
+
+
+ if alt60 == 1:
+ # C.g:374:43: ',' assignment_expression ( 'OPTIONAL' )?
+ self.match(self.input, 27, self.FOLLOW_27_in_argument_expression_list1204)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1206)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:374:69: ( 'OPTIONAL' )?
+ alt59 = 2
+ LA59_0 = self.input.LA(1)
+
+ if (LA59_0 == 53) :
+ alt59 = 1
+ if alt59 == 1:
+ # C.g:374:70: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1209)
+ if self.failed:
+ return retval
+
+
+
+
+
+ else:
+ break #loop60
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 36, argument_expression_list_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end argument_expression_list
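+
+ # NOTE: the ( 'OPTIONAL' )? subrules (token 53) are not ISO C; they let
+ # the parser tolerate EDK2 firmware sources, where OPTIONAL annotates
+ # call arguments. The argument_expression_list_return object carries the
+ # start and stop tokens so a caller can recover the matched text with
+ # self.input.toString(retval.start, retval.stop).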
+
+
+ # $ANTLR start additive_expression
+ # C.g:377:1: additive_expression : ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* ;
+ def additive_expression(self, ):
+
+ additive_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37):
+ return
+
+ # C.g:378:2: ( ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* )
+ # C.g:378:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*
+ # C.g:378:4: ( multiplicative_expression )
+ # C.g:378:5: multiplicative_expression
+ self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1225)
+ self.multiplicative_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:378:32: ( '+' multiplicative_expression | '-' multiplicative_expression )*
+ while True: #loop61
+ alt61 = 3
+ LA61_0 = self.input.LA(1)
+
+ if (LA61_0 == 68) :
+ alt61 = 1
+ elif (LA61_0 == 69) :
+ alt61 = 2
+
+
+ if alt61 == 1:
+ # C.g:378:33: '+' multiplicative_expression
+ self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1229)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1231)
+ self.multiplicative_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt61 == 2:
+ # C.g:378:65: '-' multiplicative_expression
+ self.match(self.input, 69, self.FOLLOW_69_in_additive_expression1235)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1237)
+ self.multiplicative_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop61
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 37, additive_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end additive_expression
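+
+ # NOTE: additive_expression (tokens 68 = '+', 69 = '-') and
+ # multiplicative_expression below (66 = '*', 70 = '/', 71 = '%') encode C
+ # operator precedence by layering: each rule matches the next-tighter
+ # rule once, then loops on its own operators, so '*' binds before '+'.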
+
+
+ # $ANTLR start multiplicative_expression
+ # C.g:381:1: multiplicative_expression : ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* ;
+ def multiplicative_expression(self, ):
+
+ multiplicative_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
+ return
+
+ # C.g:382:2: ( ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* )
+ # C.g:382:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
+ # C.g:382:4: ( cast_expression )
+ # C.g:382:5: cast_expression
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1251)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:382:22: ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
+ while True: #loop62
+ alt62 = 4
+ LA62 = self.input.LA(1)
+ if LA62 == 66:
+ alt62 = 1
+ elif LA62 == 70:
+ alt62 = 2
+ elif LA62 == 71:
+ alt62 = 3
+
+ if alt62 == 1:
+ # C.g:382:23: '*' cast_expression
+ self.match(self.input, 66, self.FOLLOW_66_in_multiplicative_expression1255)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1257)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt62 == 2:
+ # C.g:382:45: '/' cast_expression
+ self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1261)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1263)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt62 == 3:
+ # C.g:382:67: '%' cast_expression
+ self.match(self.input, 71, self.FOLLOW_71_in_multiplicative_expression1267)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1269)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop62
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 38, multiplicative_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end multiplicative_expression
+
+
+ # $ANTLR start cast_expression
+ # C.g:385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );
+ def cast_expression(self, ):
+
+ cast_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
+ return
+
+ # C.g:386:2: ( '(' type_name ')' cast_expression | unary_expression )
+ alt63 = 2
+ LA63_0 = self.input.LA(1)
+
+ if (LA63_0 == 62) :
+ LA63 = self.input.LA(2)
+ if LA63 == 34 or LA63 == 35 or LA63 == 36 or LA63 == 37 or LA63 == 38 or LA63 == 39 or LA63 == 40 or LA63 == 41 or LA63 == 42 or LA63 == 45 or LA63 == 46 or LA63 == 48 or LA63 == 49 or LA63 == 50 or LA63 == 51 or LA63 == 52 or LA63 == 53 or LA63 == 54 or LA63 == 55 or LA63 == 56 or LA63 == 57 or LA63 == 58 or LA63 == 59 or LA63 == 60 or LA63 == 61:
+ alt63 = 1
+ elif LA63 == IDENTIFIER:
+ LA63_25 = self.input.LA(3)
+
+ if (self.synpred109()) :
+ alt63 = 1
+ elif (True) :
+ alt63 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 25, self.input)
+
+ raise nvae
+
+ elif LA63 == HEX_LITERAL or LA63 == OCTAL_LITERAL or LA63 == DECIMAL_LITERAL or LA63 == CHARACTER_LITERAL or LA63 == STRING_LITERAL or LA63 == FLOATING_POINT_LITERAL or LA63 == 62 or LA63 == 66 or LA63 == 68 or LA63 == 69 or LA63 == 72 or LA63 == 73 or LA63 == 74 or LA63 == 77 or LA63 == 78 or LA63 == 79:
+ alt63 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)
+
+ raise nvae
+
+ elif ((IDENTIFIER <= LA63_0 <= FLOATING_POINT_LITERAL) or LA63_0 == 66 or (68 <= LA63_0 <= 69) or (72 <= LA63_0 <= 74) or (77 <= LA63_0 <= 79)) :
+ alt63 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)
+
+ raise nvae
+
+ if alt63 == 1:
+ # C.g:386:4: '(' type_name ')' cast_expression
+ self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1282)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_type_name_in_cast_expression1284)
+ self.type_name()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_cast_expression1286)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_cast_expression1288)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt63 == 2:
+ # C.g:387:4: unary_expression
+ self.following.append(self.FOLLOW_unary_expression_in_cast_expression1293)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 39, cast_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end cast_expression
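+
+ # NOTE: for input beginning "( IDENTIFIER", a cast and a parenthesized
+ # expression are indistinguishable with finite lookahead (the identifier
+ # may or may not be a typedef name), so the decision falls back to
+ # synpred109(); built-in type keywords (tokens 34..61) select the cast
+ # alternative outright.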
+
+
+ # $ANTLR start unary_expression
+ # C.g:390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );
+ def unary_expression(self, ):
+
+ unary_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
+ return
+
+ # C.g:391:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
+ alt64 = 6
+ LA64 = self.input.LA(1)
+ if LA64 == IDENTIFIER or LA64 == HEX_LITERAL or LA64 == OCTAL_LITERAL or LA64 == DECIMAL_LITERAL or LA64 == CHARACTER_LITERAL or LA64 == STRING_LITERAL or LA64 == FLOATING_POINT_LITERAL or LA64 == 62:
+ alt64 = 1
+ elif LA64 == 72:
+ alt64 = 2
+ elif LA64 == 73:
+ alt64 = 3
+ elif LA64 == 66 or LA64 == 68 or LA64 == 69 or LA64 == 77 or LA64 == 78 or LA64 == 79:
+ alt64 = 4
+ elif LA64 == 74:
+ LA64_12 = self.input.LA(2)
+
+ if (LA64_12 == 62) :
+ LA64_13 = self.input.LA(3)
+
+ if (self.synpred114()) :
+ alt64 = 5
+ elif (True) :
+ alt64 = 6
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)
+
+ raise nvae
+
+ elif ((IDENTIFIER <= LA64_12 <= FLOATING_POINT_LITERAL) or LA64_12 == 66 or (68 <= LA64_12 <= 69) or (72 <= LA64_12 <= 74) or (77 <= LA64_12 <= 79)) :
+ alt64 = 5
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)
+
+ raise nvae
+
+ if alt64 == 1:
+ # C.g:391:4: postfix_expression
+ self.following.append(self.FOLLOW_postfix_expression_in_unary_expression1304)
+ self.postfix_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 2:
+ # C.g:392:4: '++' unary_expression
+ self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1309)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_unary_expression1311)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 3:
+ # C.g:393:4: '--' unary_expression
+ self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1316)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_unary_expression1318)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 4:
+ # C.g:394:4: unary_operator cast_expression
+ self.following.append(self.FOLLOW_unary_operator_in_unary_expression1323)
+ self.unary_operator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_unary_expression1325)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 5:
+ # C.g:395:4: 'sizeof' unary_expression
+ self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1330)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_unary_expression1332)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 6:
+ # C.g:396:4: 'sizeof' '(' type_name ')'
+ self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1337)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1339)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_type_name_in_unary_expression1341)
+ self.type_name()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_unary_expression1343)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 40, unary_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end unary_expression
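+
+ # NOTE: "sizeof (" (token 74 then 62) is ambiguous between
+ # 'sizeof' unary_expression with a parenthesized operand and
+ # 'sizeof' '(' type_name ')'; synpred114() at LA(3) decides, mirroring
+ # the classic C sizeof grammar split.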
+
+
+ # $ANTLR start postfix_expression
+ # C.g:399:1: postfix_expression : p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* ;
+ def postfix_expression(self, ):
+ self.postfix_expression_stack.append(postfix_expression_scope())
+ postfix_expression_StartIndex = self.input.index()
+ a = None
+ b = None
+ x = None
+ y = None
+ z = None
+ p = None
+
+ c = None
+
+
+
+ self.postfix_expression_stack[-1].FuncCallText = ''
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
+ return
+
+ # C.g:406:2: (p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* )
+ # C.g:406:6: p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
+ self.following.append(self.FOLLOW_primary_expression_in_postfix_expression1367)
+ p = self.primary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
+
+ # C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
+ while True: #loop65
+ alt65 = 10
+ LA65 = self.input.LA(1)
+ if LA65 == 66:
+ LA65_1 = self.input.LA(2)
+
+ if (LA65_1 == IDENTIFIER) :
+ LA65_30 = self.input.LA(3)
+
+ if (self.synpred120()) :
+ alt65 = 6
+
+
+
+
+ elif LA65 == 64:
+ alt65 = 1
+ elif LA65 == 62:
+ LA65 = self.input.LA(2)
+ if LA65 == 63:
+ alt65 = 2
+ elif LA65 == 29 or LA65 == 30 or LA65 == 31 or LA65 == 32 or LA65 == 33 or LA65 == 34 or LA65 == 35 or LA65 == 36 or LA65 == 37 or LA65 == 38 or LA65 == 39 or LA65 == 40 or LA65 == 41 or LA65 == 42 or LA65 == 45 or LA65 == 46 or LA65 == 48 or LA65 == 49 or LA65 == 50 or LA65 == 51 or LA65 == 52 or LA65 == 53 or LA65 == 54 or LA65 == 55 or LA65 == 56 or LA65 == 57 or LA65 == 58 or LA65 == 59 or LA65 == 60 or LA65 == 61:
+ alt65 = 4
+ elif LA65 == IDENTIFIER:
+ LA65_55 = self.input.LA(3)
+
+ if (self.synpred117()) :
+ alt65 = 3
+ elif (self.synpred118()) :
+ alt65 = 4
+
+
+ elif LA65 == 66:
+ LA65_57 = self.input.LA(3)
+
+ if (self.synpred117()) :
+ alt65 = 3
+ elif (self.synpred118()) :
+ alt65 = 4
+
+
+ elif LA65 == HEX_LITERAL or LA65 == OCTAL_LITERAL or LA65 == DECIMAL_LITERAL or LA65 == CHARACTER_LITERAL or LA65 == STRING_LITERAL or LA65 == FLOATING_POINT_LITERAL or LA65 == 62 or LA65 == 68 or LA65 == 69 or LA65 == 72 or LA65 == 73 or LA65 == 74 or LA65 == 77 or LA65 == 78 or LA65 == 79:
+ alt65 = 3
+
+ elif LA65 == 75:
+ alt65 = 5
+ elif LA65 == 76:
+ alt65 = 7
+ elif LA65 == 72:
+ alt65 = 8
+ elif LA65 == 73:
+ alt65 = 9
+
+ if alt65 == 1:
+ # C.g:407:13: '[' expression ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1383)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_postfix_expression1385)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1387)
+ if self.failed:
+ return
+
+
+ elif alt65 == 2:
+ # C.g:408:13: '(' a= ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1401)
+ if self.failed:
+ return
+ a = self.input.LT(1)
+ self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1405)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')
+
+
+
+ elif alt65 == 3:
+ # C.g:409:13: '(' c= argument_expression_list b= ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1420)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1424)
+ c = self.argument_expression_list()
+ self.following.pop()
+ if self.failed:
+ return
+ b = self.input.LT(1)
+ self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1428)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
+
+
+
+ elif alt65 == 4:
+ # C.g:410:13: '(' macro_parameter_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1444)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1446)
+ self.macro_parameter_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1448)
+ if self.failed:
+ return
+
+
+ elif alt65 == 5:
+ # C.g:411:13: '.' x= IDENTIFIER
+ self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1462)
+ if self.failed:
+ return
+ x = self.input.LT(1)
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1466)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText += '.' + x.text
+
+
+
+ elif alt65 == 6:
+ # C.g:412:13: '*' y= IDENTIFIER
+ self.match(self.input, 66, self.FOLLOW_66_in_postfix_expression1482)
+ if self.failed:
+ return
+ y = self.input.LT(1)
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1486)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText = y.text
+
+
+
+ elif alt65 == 7:
+ # C.g:413:13: '->' z= IDENTIFIER
+ self.match(self.input, 76, self.FOLLOW_76_in_postfix_expression1502)
+ if self.failed:
+ return
+ z = self.input.LT(1)
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1506)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText += '->' + z.text
+
+
+
+ elif alt65 == 8:
+ # C.g:414:13: '++'
+ self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1522)
+ if self.failed:
+ return
+
+
+ elif alt65 == 9:
+ # C.g:415:13: '--'
+ self.match(self.input, 73, self.FOLLOW_73_in_postfix_expression1536)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop65
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 41, postfix_expression_StartIndex)
+
+ self.postfix_expression_stack.pop()
+ pass
+
+ return
+
+ # $ANTLR end postfix_expression
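+
+ # NOTE: postfix_expression carries the semantic payload of this parser:
+ # when not backtracking it accumulates the callee text ('.' and '->'
+ # members included) in FuncCallText and hands each completed call, with
+ # its start/end line and column and its argument text, to
+ # self.StoreFunctionCalling(), evidently for BaseTools' source-analysis
+ # tooling to consume.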
+
+
+ # $ANTLR start macro_parameter_list
+ # C.g:419:1: macro_parameter_list : parameter_declaration ( ',' parameter_declaration )* ;
+ def macro_parameter_list(self, ):
+
+ macro_parameter_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
+ return
+
+ # C.g:420:2: ( parameter_declaration ( ',' parameter_declaration )* )
+ # C.g:420:4: parameter_declaration ( ',' parameter_declaration )*
+ self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1559)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:420:26: ( ',' parameter_declaration )*
+ while True: #loop66
+ alt66 = 2
+ LA66_0 = self.input.LA(1)
+
+ if (LA66_0 == 27) :
+ alt66 = 1
+
+
+ if alt66 == 1:
+ # C.g:420:27: ',' parameter_declaration
+ self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1562)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1564)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop66
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 42, macro_parameter_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end macro_parameter_list
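+
+ # NOTE: macro_parameter_list (parameter declarations in argument
+ # position) is not ISO C; it evidently lets calls to function-like macros
+ # whose "arguments" are declarations, e.g. FOO (UINT8 *Ptr), parse
+ # without error.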
+
+
+ # $ANTLR start unary_operator
+ # C.g:423:1: unary_operator : ( '&' | '*' | '+' | '-' | '~' | '!' );
+ def unary_operator(self, ):
+
+ unary_operator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
+ return
+
+ # C.g:424:2: ( '&' | '*' | '+' | '-' | '~' | '!' )
+ # C.g:
+ if self.input.LA(1) == 66 or (68 <= self.input.LA(1) <= 69) or (77 <= self.input.LA(1) <= 79):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_unary_operator0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 43, unary_operator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end unary_operator
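+
+ # NOTE: when every alternative is a single token, ANTLR collapses the
+ # rule to one set test plus consume(), as above; on mismatch it raises
+ # MismatchedSetException instead of a per-alternative
+ # NoViableAltException.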
+
+ class primary_expression_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start primary_expression
+ # C.g:432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );
+ def primary_expression(self, ):
+
+ retval = self.primary_expression_return()
+ retval.start = self.input.LT(1)
+ primary_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 44):
+ return retval
+
+ # C.g:433:2: ( IDENTIFIER | constant | '(' expression ')' )
+ alt67 = 3
+ LA67 = self.input.LA(1)
+ if LA67 == IDENTIFIER:
+ LA67_1 = self.input.LA(2)
+
+ if (LA67_1 == EOF or LA67_1 == 25 or (27 <= LA67_1 <= 28) or LA67_1 == 44 or LA67_1 == 47 or LA67_1 == 53 or (62 <= LA67_1 <= 66) or (68 <= LA67_1 <= 73) or (75 <= LA67_1 <= 77) or (80 <= LA67_1 <= 102)) :
+ alt67 = 1
+ elif (LA67_1 == IDENTIFIER or LA67_1 == STRING_LITERAL) :
+ alt67 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 1, self.input)
+
+ raise nvae
+
+ elif LA67 == HEX_LITERAL or LA67 == OCTAL_LITERAL or LA67 == DECIMAL_LITERAL or LA67 == CHARACTER_LITERAL or LA67 == STRING_LITERAL or LA67 == FLOATING_POINT_LITERAL:
+ alt67 = 2
+ elif LA67 == 62:
+ alt67 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 0, self.input)
+
+ raise nvae
+
+ if alt67 == 1:
+ # C.g:433:4: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_primary_expression1613)
+ if self.failed:
+ return retval
+
+
+ elif alt67 == 2:
+ # C.g:434:4: constant
+ self.following.append(self.FOLLOW_constant_in_primary_expression1618)
+ self.constant()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt67 == 3:
+ # C.g:435:4: '(' expression ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_primary_expression1623)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_expression_in_primary_expression1625)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, 63, self.FOLLOW_63_in_primary_expression1627)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 44, primary_expression_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end primary_expression
+
+
+ # $ANTLR start constant
+ # C.g:438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );
+ def constant(self, ):
+
+ constant_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45):
+ return
+
+ # C.g:439:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL )
+ alt72 = 6
+ LA72 = self.input.LA(1)
+ if LA72 == HEX_LITERAL:
+ alt72 = 1
+ elif LA72 == OCTAL_LITERAL:
+ alt72 = 2
+ elif LA72 == DECIMAL_LITERAL:
+ alt72 = 3
+ elif LA72 == CHARACTER_LITERAL:
+ alt72 = 4
+ elif LA72 == IDENTIFIER or LA72 == STRING_LITERAL:
+ alt72 = 5
+ elif LA72 == FLOATING_POINT_LITERAL:
+ alt72 = 6
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input)
+
+ raise nvae
+
+ if alt72 == 1:
+ # C.g:439:9: HEX_LITERAL
+ self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1643)
+ if self.failed:
+ return
+
+
+ elif alt72 == 2:
+ # C.g:440:9: OCTAL_LITERAL
+ self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1653)
+ if self.failed:
+ return
+
+
+ elif alt72 == 3:
+ # C.g:441:9: DECIMAL_LITERAL
+ self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1663)
+ if self.failed:
+ return
+
+
+ elif alt72 == 4:
+ # C.g:442:7: CHARACTER_LITERAL
+ self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1671)
+ if self.failed:
+ return
+
+
+ elif alt72 == 5:
+ # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )*
+ # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+
+ cnt70 = 0
+ while True: #loop70
+ alt70 = 2
+ LA70_0 = self.input.LA(1)
+
+ if (LA70_0 == IDENTIFIER) :
+ LA70_1 = self.input.LA(2)
+
+ if (LA70_1 == STRING_LITERAL) :
+ alt70 = 1
+ elif (LA70_1 == IDENTIFIER) :
+ LA70_33 = self.input.LA(3)
+
+ if (self.synpred138()) :
+ alt70 = 1
+
+
+
+
+ elif (LA70_0 == STRING_LITERAL) :
+ alt70 = 1
+
+
+ if alt70 == 1:
+ # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
+ # C.g:443:8: ( IDENTIFIER )*
+ while True: #loop68
+ alt68 = 2
+ LA68_0 = self.input.LA(1)
+
+ if (LA68_0 == IDENTIFIER) :
+ alt68 = 1
+
+
+ if alt68 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1680)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop68
+
+
+ # C.g:443:20: ( STRING_LITERAL )+
+ cnt69 = 0
+ while True: #loop69
+ alt69 = 2
+ LA69_0 = self.input.LA(1)
+
+ if (LA69_0 == STRING_LITERAL) :
+ LA69_31 = self.input.LA(2)
+
+ if (self.synpred137()) :
+ alt69 = 1
+
+
+
+
+ if alt69 == 1:
+ # C.g:0:0: STRING_LITERAL
+ self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1683)
+ if self.failed:
+ return
+
+
+ else:
+ if cnt69 >= 1:
+ break #loop69
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(69, self.input)
+ raise eee
+
+ cnt69 += 1
+
+
+
+
+ else:
+ if cnt70 >= 1:
+ break #loop70
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(70, self.input)
+ raise eee
+
+ cnt70 += 1
+
+
+ # C.g:443:38: ( IDENTIFIER )*
+ while True: #loop71
+ alt71 = 2
+ LA71_0 = self.input.LA(1)
+
+ if (LA71_0 == IDENTIFIER) :
+ alt71 = 1
+
+
+ if alt71 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1688)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop71
+
+
+
+
+ elif alt72 == 6:
+ # C.g:444:9: FLOATING_POINT_LITERAL
+ self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1699)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 45, constant_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end constant
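+
+ # NOTE: alt 5 of constant is deliberately looser than ISO C string
+ # concatenation: it accepts identifiers interleaved with adjacent string
+ # literals, evidently to tolerate macro-prefixed strings in firmware
+ # sources without a preprocessing pass. synpred137/synpred138 keep the
+ # greedy loops from consuming an identifier that starts the next
+ # construct.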
+
+ class expression_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start expression
+ # C.g:449:1: expression : assignment_expression ( ',' assignment_expression )* ;
+ def expression(self, ):
+
+ retval = self.expression_return()
+ retval.start = self.input.LT(1)
+ expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 46):
+ return retval
+
+ # C.g:450:2: ( assignment_expression ( ',' assignment_expression )* )
+ # C.g:450:4: assignment_expression ( ',' assignment_expression )*
+ self.following.append(self.FOLLOW_assignment_expression_in_expression1715)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:450:26: ( ',' assignment_expression )*
+ while True: #loop73
+ alt73 = 2
+ LA73_0 = self.input.LA(1)
+
+ if (LA73_0 == 27) :
+ alt73 = 1
+
+
+ if alt73 == 1:
+ # C.g:450:27: ',' assignment_expression
+ self.match(self.input, 27, self.FOLLOW_27_in_expression1718)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_assignment_expression_in_expression1720)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop73
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 46, expression_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end expression
+
+
+ # $ANTLR start constant_expression
+ # C.g:453:1: constant_expression : conditional_expression ;
+ def constant_expression(self, ):
+
+ constant_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47):
+ return
+
+ # C.g:454:2: ( conditional_expression )
+ # C.g:454:4: conditional_expression
+ self.following.append(self.FOLLOW_conditional_expression_in_constant_expression1733)
+ self.conditional_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 47, constant_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end constant_expression
+
+
+ # $ANTLR start assignment_expression
+ # C.g:457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );
+ def assignment_expression(self, ):
+
+ assignment_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48):
+ return
+
+ # C.g:458:2: ( lvalue assignment_operator assignment_expression | conditional_expression )
+ alt74 = 2
+ LA74 = self.input.LA(1)
+ if LA74 == IDENTIFIER:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_13 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_14 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_15 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_16 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_17 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_18 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_19 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input)
+
+ raise nvae
+
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ elif LA74 == STRING_LITERAL:
+ LA74_21 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input)
+
+ raise nvae
+
+ elif LA74 == IDENTIFIER:
+ LA74_22 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 22, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_44 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_45 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_46 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_47 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 47, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_48 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_49 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_50 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_73 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_74 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_75 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_76 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_77 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_78 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_79 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input)
+
+ raise nvae
+
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_102 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_103 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_104 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_105 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_106 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_107 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_108 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_131 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_132 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_133 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_134 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_135 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_136 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_137 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_160 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input)
+
+ raise nvae
+
+ elif LA74 == 64:
+ LA74_161 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_162 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_163 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_164 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_165 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_166 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_167 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == STRING_LITERAL:
+ LA74_189 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 189, self.input)
+
+ raise nvae
+
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_191 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_192 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_193 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_194 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_195 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_196 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_197 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_220 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_221 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_222 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_223 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_224 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_225 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_226 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_227 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_228 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_229 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_230 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_231 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input)
+
+ raise nvae
+
+ elif LA74 == 34 or LA74 == 35 or LA74 == 36 or LA74 == 37 or LA74 == 38 or LA74 == 39 or LA74 == 40 or LA74 == 41 or LA74 == 42 or LA74 == 45 or LA74 == 46 or LA74 == 48 or LA74 == 49 or LA74 == 50 or LA74 == 51 or LA74 == 52 or LA74 == 53 or LA74 == 54 or LA74 == 55 or LA74 == 56 or LA74 == 57 or LA74 == 58 or LA74 == 59 or LA74 == 60 or LA74 == 61:
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_244 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_245 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_246 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_247 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_248 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_249 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_250 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_251 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_252 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_253 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_254 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_255 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_256 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_257 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_258 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_259 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_260 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_261 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_262 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_263 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_264 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_265 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_266 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_267 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74 = self.input.LA(2)
+ if LA74 == 62:
+ LA74_268 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input)
+
+ raise nvae
+
+ elif LA74 == IDENTIFIER:
+ LA74_269 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_270 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 270, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_271 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_272 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_273 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_274 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_275 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_276 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_277 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_278 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_279 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74 = self.input.LA(2)
+ if LA74 == 62:
+ LA74_280 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input)
+
+ raise nvae
+
+ elif LA74 == IDENTIFIER:
+ LA74_281 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_282 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_283 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_284 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_285 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 285, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_286 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_287 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_288 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_289 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_290 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_291 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input)
+
+ raise nvae
+
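+            # The long LA(1)/LA(2)/LA(3) chain above is ANTLR's unrolled
+            # lookahead for decision 74: synpred142 backtracks over the
+            # "lvalue assignment_operator" prefix, and alt74 now selects
+            # between the two productions of assignment_expression.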
+ if alt74 == 1:
+ # C.g:458:4: lvalue assignment_operator assignment_expression
+ self.following.append(self.FOLLOW_lvalue_in_assignment_expression1744)
+ self.lvalue()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1746)
+ self.assignment_operator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1748)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt74 == 2:
+ # C.g:459:4: conditional_expression
+ self.following.append(self.FOLLOW_conditional_expression_in_assignment_expression1753)
+ self.conditional_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 48, assignment_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end assignment_expression
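+
+    # A minimal usage sketch, assuming the ANTLR3 Python runtime and a
+    # translation_unit entry rule (both assumptions, not taken from this
+    # diff): generated rule methods such as assignment_expression() are
+    # normally driven indirectly, e.g.
+    #
+    #   from antlr3 import ANTLRFileStream, CommonTokenStream
+    #   lexer  = CLexer(ANTLRFileStream('Foo.c'))
+    #   parser = CParser(CommonTokenStream(lexer))
+    #   parser.translation_unit()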
+
+
+ # $ANTLR start lvalue
+ # C.g:462:1: lvalue : unary_expression ;
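+    # lvalue simply delegates to unary_expression, so any unary expression
+    # is accepted on the left-hand side of an assignment; no further
+    # checking is done at parse time.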
+ def lvalue(self, ):
+
+ lvalue_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49):
+ return
+
+ # C.g:463:2: ( unary_expression )
+ # C.g:463:4: unary_expression
+ self.following.append(self.FOLLOW_unary_expression_in_lvalue1765)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 49, lvalue_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end lvalue
+
+
+ # $ANTLR start assignment_operator
+ # C.g:466:1: assignment_operator : ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' );
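+    # All eleven assignment operators are matched as a single token set:
+    # token type 28 ('=') plus the ten compound operators in the range
+    # 80..89, so no per-operator branching is needed here.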
+ def assignment_operator(self, ):
+
+ assignment_operator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50):
+ return
+
+ # C.g:467:2: ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' )
+ # C.g:
+            if self.input.LA(1) == 28 or (80 <= self.input.LA(1) <= 89):
+                self.input.consume()
+                self.errorRecovery = False
+                self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_assignment_operator0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 50, assignment_operator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end assignment_operator
+
+
+ # $ANTLR start conditional_expression
+ # C.g:480:1: conditional_expression : e= logical_or_expression ( '?' expression ':' conditional_expression )? ;
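+    # The logical_or_expression result is captured in 'e' so that, when not
+    # backtracking, the source text of a ternary's condition can be recorded
+    # via StorePredicateExpression.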
+ def conditional_expression(self, ):
+
+ conditional_expression_StartIndex = self.input.index()
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51):
+ return
+
+ # C.g:481:2: (e= logical_or_expression ( '?' expression ':' conditional_expression )? )
+ # C.g:481:4: e= logical_or_expression ( '?' expression ':' conditional_expression )?
+ self.following.append(self.FOLLOW_logical_or_expression_in_conditional_expression1839)
+ e = self.logical_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:481:28: ( '?' expression ':' conditional_expression )?
+ alt75 = 2
+ LA75_0 = self.input.LA(1)
+
+ if (LA75_0 == 90) :
+ alt75 = 1
+ if alt75 == 1:
+ # C.g:481:29: '?' expression ':' conditional_expression
+ self.match(self.input, 90, self.FOLLOW_90_in_conditional_expression1842)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_conditional_expression1844)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1846)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1848)
+ self.conditional_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 51, conditional_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end conditional_expression
+
+ class logical_or_expression_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start logical_or_expression
+ # C.g:484:1: logical_or_expression : logical_and_expression ( '||' logical_and_expression )* ;
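+    # Unlike the other binary-expression rules, this one returns a
+    # logical_or_expression_return carrying the start/stop tokens, which
+    # conditional_expression uses to recover the matched source text.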
+ def logical_or_expression(self, ):
+
+ retval = self.logical_or_expression_return()
+ retval.start = self.input.LT(1)
+ logical_or_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 52):
+ return retval
+
+ # C.g:485:2: ( logical_and_expression ( '||' logical_and_expression )* )
+ # C.g:485:4: logical_and_expression ( '||' logical_and_expression )*
+ self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1863)
+ self.logical_and_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:485:27: ( '||' logical_and_expression )*
+ while True: #loop76
+ alt76 = 2
+ LA76_0 = self.input.LA(1)
+
+ if (LA76_0 == 91) :
+ alt76 = 1
+
+
+ if alt76 == 1:
+ # C.g:485:28: '||' logical_and_expression
+ self.match(self.input, 91, self.FOLLOW_91_in_logical_or_expression1866)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1868)
+ self.logical_and_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop76
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 52, logical_or_expression_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end logical_or_expression
+
+
+ # $ANTLR start logical_and_expression
+ # C.g:488:1: logical_and_expression : inclusive_or_expression ( '&&' inclusive_or_expression )* ;
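+    # This rule and the binary-expression rules that follow (inclusive_or,
+    # exclusive_or, and, equality, relational, shift) all share the same
+    # shape: parse one operand, then loop while the next token is one of the
+    # rule's operators, yielding left-associative parses.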
+ def logical_and_expression(self, ):
+
+ logical_and_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53):
+ return
+
+ # C.g:489:2: ( inclusive_or_expression ( '&&' inclusive_or_expression )* )
+ # C.g:489:4: inclusive_or_expression ( '&&' inclusive_or_expression )*
+ self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1881)
+ self.inclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:489:28: ( '&&' inclusive_or_expression )*
+ while True: #loop77
+ alt77 = 2
+ LA77_0 = self.input.LA(1)
+
+ if (LA77_0 == 92) :
+ alt77 = 1
+
+
+ if alt77 == 1:
+ # C.g:489:29: '&&' inclusive_or_expression
+ self.match(self.input, 92, self.FOLLOW_92_in_logical_and_expression1884)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1886)
+ self.inclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop77
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 53, logical_and_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end logical_and_expression
+
+
+ # $ANTLR start inclusive_or_expression
+ # C.g:492:1: inclusive_or_expression : exclusive_or_expression ( '|' exclusive_or_expression )* ;
+ def inclusive_or_expression(self, ):
+
+ inclusive_or_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54):
+ return
+
+ # C.g:493:2: ( exclusive_or_expression ( '|' exclusive_or_expression )* )
+ # C.g:493:4: exclusive_or_expression ( '|' exclusive_or_expression )*
+ self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899)
+ self.exclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:493:28: ( '|' exclusive_or_expression )*
+ while True: #loop78
+ alt78 = 2
+ LA78_0 = self.input.LA(1)
+
+ if (LA78_0 == 93) :
+ alt78 = 1
+
+
+ if alt78 == 1:
+ # C.g:493:29: '|' exclusive_or_expression
+ self.match(self.input, 93, self.FOLLOW_93_in_inclusive_or_expression1902)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904)
+ self.exclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop78
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 54, inclusive_or_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end inclusive_or_expression
+
+
+ # $ANTLR start exclusive_or_expression
+ # C.g:496:1: exclusive_or_expression : and_expression ( '^' and_expression )* ;
+ def exclusive_or_expression(self, ):
+
+ exclusive_or_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55):
+ return
+
+ # C.g:497:2: ( and_expression ( '^' and_expression )* )
+ # C.g:497:4: and_expression ( '^' and_expression )*
+ self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1917)
+ self.and_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:497:19: ( '^' and_expression )*
+ while True: #loop79
+ alt79 = 2
+ LA79_0 = self.input.LA(1)
+
+ if (LA79_0 == 94) :
+ alt79 = 1
+
+
+ if alt79 == 1:
+ # C.g:497:20: '^' and_expression
+ self.match(self.input, 94, self.FOLLOW_94_in_exclusive_or_expression1920)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1922)
+ self.and_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop79
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 55, exclusive_or_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end exclusive_or_expression
+
+
+ # $ANTLR start and_expression
+ # C.g:500:1: and_expression : equality_expression ( '&' equality_expression )* ;
+ def and_expression(self, ):
+
+ and_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56):
+ return
+
+ # C.g:501:2: ( equality_expression ( '&' equality_expression )* )
+ # C.g:501:4: equality_expression ( '&' equality_expression )*
+ self.following.append(self.FOLLOW_equality_expression_in_and_expression1935)
+ self.equality_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:501:24: ( '&' equality_expression )*
+ while True: #loop80
+ alt80 = 2
+ LA80_0 = self.input.LA(1)
+
+ if (LA80_0 == 77) :
+ alt80 = 1
+
+
+ if alt80 == 1:
+ # C.g:501:25: '&' equality_expression
+ self.match(self.input, 77, self.FOLLOW_77_in_and_expression1938)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_equality_expression_in_and_expression1940)
+ self.equality_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop80
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 56, and_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end and_expression
+
+
+ # $ANTLR start equality_expression
+ # C.g:503:1: equality_expression : relational_expression ( ( '==' | '!=' ) relational_expression )* ;
+ def equality_expression(self, ):
+
+ equality_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57):
+ return
+
+ # C.g:504:2: ( relational_expression ( ( '==' | '!=' ) relational_expression )* )
+ # C.g:504:4: relational_expression ( ( '==' | '!=' ) relational_expression )*
+ self.following.append(self.FOLLOW_relational_expression_in_equality_expression1952)
+ self.relational_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:504:26: ( ( '==' | '!=' ) relational_expression )*
+ while True: #loop81
+ alt81 = 2
+ LA81_0 = self.input.LA(1)
+
+ if ((95 <= LA81_0 <= 96)) :
+ alt81 = 1
+
+
+ if alt81 == 1:
+ # C.g:504:27: ( '==' | '!=' ) relational_expression
+                    if (95 <= self.input.LA(1) <= 96):
+                        self.input.consume()
+                        self.errorRecovery = False
+                        self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_equality_expression1955
+ )
+ raise mse
+
+
+ self.following.append(self.FOLLOW_relational_expression_in_equality_expression1961)
+ self.relational_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop81
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 57, equality_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end equality_expression
+
+
+ # $ANTLR start relational_expression
+ # C.g:507:1: relational_expression : shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ;
+ def relational_expression(self, ):
+
+ relational_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58):
+ return
+
+ # C.g:508:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* )
+ # C.g:508:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
+ self.following.append(self.FOLLOW_shift_expression_in_relational_expression1975)
+ self.shift_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:508:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
+ while True: #loop82
+ alt82 = 2
+ LA82_0 = self.input.LA(1)
+
+ if ((97 <= LA82_0 <= 100)) :
+ alt82 = 1
+
+
+ if alt82 == 1:
+ # C.g:508:22: ( '<' | '>' | '<=' | '>=' ) shift_expression
+                    if (97 <= self.input.LA(1) <= 100):
+                        self.input.consume()
+                        self.errorRecovery = False
+                        self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_relational_expression1978
+ )
+ raise mse
+
+
+ self.following.append(self.FOLLOW_shift_expression_in_relational_expression1988)
+ self.shift_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop82
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 58, relational_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end relational_expression
+
+
+ # $ANTLR start shift_expression
+ # C.g:511:1: shift_expression : additive_expression ( ( '<<' | '>>' ) additive_expression )* ;
+ def shift_expression(self, ):
+
+ shift_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59):
+ return
+
+ # C.g:512:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* )
+ # C.g:512:4: additive_expression ( ( '<<' | '>>' ) additive_expression )*
+ self.following.append(self.FOLLOW_additive_expression_in_shift_expression2001)
+ self.additive_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:512:24: ( ( '<<' | '>>' ) additive_expression )*
+ while True: #loop83
+ alt83 = 2
+ LA83_0 = self.input.LA(1)
+
+ if ((101 <= LA83_0 <= 102)) :
+ alt83 = 1
+
+
+ if alt83 == 1:
+ # C.g:512:25: ( '<<' | '>>' ) additive_expression
+                    if (101 <= self.input.LA(1) <= 102):
+                        self.input.consume()
+                        self.errorRecovery = False
+                        self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_shift_expression2004
+ )
+ raise mse
+
+
+ self.following.append(self.FOLLOW_additive_expression_in_shift_expression2010)
+ self.additive_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop83
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 59, shift_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end shift_expression
+
+
+ # $ANTLR start statement
+ # C.g:517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );
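+    # An 11-way decision over the statement alternatives: ambiguous
+    # IDENTIFIER-led inputs are resolved with syntactic predicates
+    # (synpred169/173/174) choosing between expression_statement,
+    # macro_statement, asm2_statement and the declaration fallback.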
+ def statement(self, ):
+
+ statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60):
+ return
+
+ # C.g:518:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration )
+ alt84 = 11
+ LA84 = self.input.LA(1)
+ if LA84 == IDENTIFIER:
+ LA84 = self.input.LA(2)
+ if LA84 == 62:
+ LA84_43 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (self.synpred173()) :
+ alt84 = 7
+ elif (self.synpred174()) :
+ alt84 = 8
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 43, self.input)
+
+ raise nvae
+
+ elif LA84 == 47:
+ alt84 = 1
+                elif LA84 == STRING_LITERAL or LA84 in (27, 28, 64, 75, 76, 77) or (68 <= LA84 <= 73) or (80 <= LA84 <= 102):
+ alt84 = 3
+ elif LA84 == 66:
+ LA84_47 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input)
+
+ raise nvae
+
+ elif LA84 == IDENTIFIER:
+ LA84_53 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 53, self.input)
+
+ raise nvae
+
+ elif LA84 == 25:
+ LA84_68 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 68, self.input)
+
+ raise nvae
+
+                elif (29 <= LA84 <= 42) or LA84 in (45, 46) or (48 <= LA84 <= 61):
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input)
+
+ raise nvae
+
+ elif LA84 == 106 or LA84 == 107:
+ alt84 = 1
+ elif LA84 == 43:
+ alt84 = 2
+            elif LA84 in (HEX_LITERAL, OCTAL_LITERAL, DECIMAL_LITERAL, CHARACTER_LITERAL, STRING_LITERAL, FLOATING_POINT_LITERAL, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79):
+ alt84 = 3
+ elif LA84 == 108 or LA84 == 110:
+ alt84 = 4
+ elif LA84 == 111 or LA84 == 112 or LA84 == 113:
+ alt84 = 5
+ elif LA84 == 114 or LA84 == 115 or LA84 == 116 or LA84 == 117:
+ alt84 = 6
+ elif LA84 == 103:
+ alt84 = 8
+ elif LA84 == 104:
+ alt84 = 9
+ elif LA84 == 105:
+ alt84 = 10
+            elif LA84 == 26 or (29 <= LA84 <= 42) or LA84 in (45, 46) or (48 <= LA84 <= 61):
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input)
+
+ raise nvae
+
+ if alt84 == 1:
+ # C.g:518:4: labeled_statement
+ self.following.append(self.FOLLOW_labeled_statement_in_statement2025)
+ self.labeled_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 2:
+ # C.g:519:4: compound_statement
+ self.following.append(self.FOLLOW_compound_statement_in_statement2030)
+ self.compound_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 3:
+ # C.g:520:4: expression_statement
+ self.following.append(self.FOLLOW_expression_statement_in_statement2035)
+ self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 4:
+ # C.g:521:4: selection_statement
+ self.following.append(self.FOLLOW_selection_statement_in_statement2040)
+ self.selection_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 5:
+ # C.g:522:4: iteration_statement
+ self.following.append(self.FOLLOW_iteration_statement_in_statement2045)
+ self.iteration_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 6:
+ # C.g:523:4: jump_statement
+ self.following.append(self.FOLLOW_jump_statement_in_statement2050)
+ self.jump_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 7:
+ # C.g:524:4: macro_statement
+ self.following.append(self.FOLLOW_macro_statement_in_statement2055)
+ self.macro_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 8:
+ # C.g:525:4: asm2_statement
+ self.following.append(self.FOLLOW_asm2_statement_in_statement2060)
+ self.asm2_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 9:
+ # C.g:526:4: asm1_statement
+ self.following.append(self.FOLLOW_asm1_statement_in_statement2065)
+ self.asm1_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 10:
+ # C.g:527:4: asm_statement
+ self.following.append(self.FOLLOW_asm_statement_in_statement2070)
+ self.asm_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 11:
+ # C.g:528:4: declaration
+ self.following.append(self.FOLLOW_declaration_in_statement2075)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 60, statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end statement
+
+
+ # $ANTLR start asm2_statement
+ # C.g:531:1: asm2_statement : ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' ;
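+    # Matches an optional '__asm__' keyword (token 103), then
+    # IDENTIFIER '(' ... ')' ';'.  The (~ ( ';' ) )* loop consumes any
+    # token except ';' (token 25); a ')' (token 63) is consumed only when
+    # the token after it is not the terminating ';'.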
+    def asm2_statement(self):
+
+ asm2_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
+ return
+
+ # C.g:532:2: ( ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' )
+ # C.g:532:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
+ # C.g:532:4: ( '__asm__' )?
+ alt85 = 2
+ LA85_0 = self.input.LA(1)
+
+ if (LA85_0 == 103) :
+ alt85 = 1
+ if alt85 == 1:
+ # C.g:0:0: '__asm__'
+ self.match(self.input, 103, self.FOLLOW_103_in_asm2_statement2086)
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2089)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2091)
+ if self.failed:
+ return
+ # C.g:532:30: (~ ( ';' ) )*
+ while True: #loop86
+ alt86 = 2
+ LA86_0 = self.input.LA(1)
+
+ if (LA86_0 == 63) :
+ LA86_1 = self.input.LA(2)
+
+ if ((IDENTIFIER <= LA86_1 <= LINE_COMMAND) or (26 <= LA86_1 <= 117)) :
+ alt86 = 1
+
+
+ elif ((IDENTIFIER <= LA86_0 <= LINE_COMMAND) or (26 <= LA86_0 <= 62) or (64 <= LA86_0 <= 117)) :
+ alt86 = 1
+
+
+ if alt86 == 1:
+ # C.g:532:31: ~ ( ';' )
+ if (IDENTIFIER <= self.input.LA(1) <= LINE_COMMAND) or (26 <= self.input.LA(1) <= 117):
+                    self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_asm2_statement2094
+ )
+ raise mse
+
+
+
+
+ else:
+ break #loop86
+
+
+ self.match(self.input, 63, self.FOLLOW_63_in_asm2_statement2101)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2103)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 61, asm2_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end asm2_statement
+
+
+ # $ANTLR start asm1_statement
+ # C.g:535:1: asm1_statement : '_asm' '{' (~ ( '}' ) )* '}' ;
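+    # MSVC-style inline assembly: '_asm' (token 104) followed by a braced
+    # block; the loop consumes every token except the closing '}' (token 44).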
+    def asm1_statement(self):
+
+ asm1_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
+ return
+
+ # C.g:536:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
+ # C.g:536:4: '_asm' '{' (~ ( '}' ) )* '}'
+ self.match(self.input, 104, self.FOLLOW_104_in_asm1_statement2115)
+ if self.failed:
+ return
+ self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2117)
+ if self.failed:
+ return
+ # C.g:536:15: (~ ( '}' ) )*
+ while True: #loop87
+ alt87 = 2
+ LA87_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA87_0 <= 43) or (45 <= LA87_0 <= 117)) :
+ alt87 = 1
+
+
+ if alt87 == 1:
+ # C.g:536:16: ~ ( '}' )
+ if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
+                    self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_asm1_statement2120
+ )
+ raise mse
+
+
+
+
+ else:
+ break #loop87
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2127)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 62, asm1_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end asm1_statement
+
+
+ # $ANTLR start asm_statement
+ # C.g:539:1: asm_statement : '__asm' '{' (~ ( '}' ) )* '}' ;
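+    # Identical to asm1_statement except for the '__asm' spelling (token 105).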
+    def asm_statement(self):
+
+ asm_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
+ return
+
+ # C.g:540:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
+ # C.g:540:4: '__asm' '{' (~ ( '}' ) )* '}'
+ self.match(self.input, 105, self.FOLLOW_105_in_asm_statement2138)
+ if self.failed:
+ return
+ self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2140)
+ if self.failed:
+ return
+ # C.g:540:16: (~ ( '}' ) )*
+ while True: #loop88
+ alt88 = 2
+ LA88_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA88_0 <= 43) or (45 <= LA88_0 <= 117)) :
+ alt88 = 1
+
+
+ if alt88 == 1:
+ # C.g:540:17: ~ ( '}' )
+ if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
+                    self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_asm_statement2143
+ )
+ raise mse
+
+
+
+
+ else:
+ break #loop88
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2150)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 63, asm_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end asm_statement
+
+
+ # $ANTLR start macro_statement
+ # C.g:543:1: macro_statement : IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' ;
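+    # A statement-like macro invocation: IDENTIFIER '(' followed by optional
+    # declarations, an optional statement_list and an optional trailing
+    # expression, then ')'.  Because all three inner pieces are optional and
+    # their first tokens overlap, the lookahead code below is by far the
+    # largest decision in the parser.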
+    def macro_statement(self):
+
+ macro_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
+ return
+
+ # C.g:544:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' )
+ # C.g:544:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')'
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2162)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2164)
+ if self.failed:
+ return
+ # C.g:544:19: ( declaration )*
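+            # This ( declaration )* decision could not be reduced to a
+            # compact DFA, so it is expanded inline: for each viable
+            # LA(1)/LA(2) pair the parser peeks at LA(3) and runs
+            # synpred181, a backtracking trial parse of declaration, to
+            # decide whether to iterate.  Hence the long mechanical elif
+            # chains that follow.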
+ while True: #loop89
+ alt89 = 2
+ LA89 = self.input.LA(1)
+ if LA89 == IDENTIFIER:
+ LA89 = self.input.LA(2)
+ if LA89 == 62:
+ LA89_45 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_47 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 66:
+ LA89_50 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_68 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_71 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_72 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_73 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_74 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_75 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_76 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_77 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_78 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_79 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_80 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_81 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_82 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_83 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_84 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_85 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_86 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 26:
+ LA89 = self.input.LA(2)
+ if LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_87 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_88 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_89 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_90 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_91 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_92 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_93 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_94 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_95 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_96 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_97 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_98 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_99 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_100 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 66:
+ LA89_101 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_102 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_103 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_104 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_105 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_106 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_107 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_108 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_109 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_110 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_111 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_112 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_113 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_114 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_115 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_116 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_117 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_118 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_119 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_120 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_121 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_122 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_123 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_124 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_125 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 34:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_126 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_127 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_128 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_129 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_130 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_131 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_132 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_133 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_134 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_135 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_136 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_137 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_138 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_139 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_140 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_141 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_142 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_143 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_144 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_145 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 35:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_146 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_147 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_148 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_149 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_150 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_151 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_152 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_153 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_154 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_155 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_156 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_157 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_158 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_159 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_160 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_161 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_162 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_163 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_164 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_165 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 36:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_166 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_167 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_168 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_169 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_170 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_171 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_172 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_173 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_174 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_175 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_176 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_177 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_178 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_179 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_180 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_181 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_182 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_183 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_184 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_185 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 37:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_186 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_187 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_188 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_189 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_190 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_191 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_192 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_193 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_194 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_195 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_196 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_197 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_198 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_199 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_200 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_201 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_202 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_203 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_204 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_205 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 38:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_206 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_207 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_208 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_209 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_210 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_211 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_212 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_213 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_214 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_215 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_216 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_217 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_218 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_219 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_220 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_221 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_222 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_223 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_224 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_225 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 39:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_226 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_227 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_228 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_229 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_230 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_231 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_232 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_233 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_234 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_235 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_236 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_237 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_238 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_239 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_240 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_241 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_242 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_243 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_244 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_245 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 40:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_246 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_247 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_248 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_249 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_250 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_251 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_252 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_253 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_254 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_255 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_256 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_257 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_258 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_259 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_260 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_261 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_262 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_263 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_264 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_265 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 41:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_266 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_267 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_268 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_269 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_270 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_271 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_272 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_273 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_274 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_275 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_276 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_277 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_278 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_279 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_280 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_281 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_282 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_283 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_284 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_285 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 42:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_286 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_287 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_288 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_289 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_290 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_291 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_292 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_293 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_294 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_295 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_296 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_297 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_298 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_299 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_300 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_301 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_302 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_303 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_304 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_305 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_40 = self.input.LA(2)
+
+ if (LA89_40 == IDENTIFIER) :
+ LA89_306 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif (LA89_40 == 43) :
+ LA89_307 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+
+ elif LA89 == 48:
+ LA89_41 = self.input.LA(2)
+
+ if (LA89_41 == 43) :
+ LA89_308 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif (LA89_41 == IDENTIFIER) :
+ LA89_309 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+
+            elif 49 <= LA89 <= 61:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_310 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_311 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_312 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_313 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_314 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_315 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_316 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_317 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_318 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_319 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_320 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_321 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_322 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_323 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_324 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_325 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_326 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_327 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_328 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+                    elif (49 <= LA89 <= 57) or LA89 == 61:
+ LA89_329 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+
+ if alt89 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_macro_statement2166)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop89
+
+
+ # C.g:544:33: ( statement_list )?
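+            # The optional statement_list is resolved the same way as the
+            # ( declaration )* loop above, with synpred182 as the deciding
+            # trial parse.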
+ alt90 = 2
+ LA90 = self.input.LA(1)
+ if LA90 == IDENTIFIER:
+ LA90 = self.input.LA(2)
+                if LA90 == 25 or (29 <= LA90 <= 42) or (45 <= LA90 <= 61):
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_45 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_46 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == IDENTIFIER:
+ LA90_47 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 64:
+ LA90_48 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_49 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_50 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_51 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_52 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_53 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_54 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_55 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_56 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_57 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_58 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_59 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_60 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_61 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_62 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_63 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_64 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_65 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_66 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_67 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+                elif LA90 == 28 or (80 <= LA90 <= 89):
+ LA90_70 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+                elif LA90 in (25, 26, 45, 46) or (29 <= LA90 <= 43) or (48 <= LA90 <= 61) or (103 <= LA90 <= 108) or (110 <= LA90 <= 117):
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_87 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_88 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_89 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_90 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_91 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_92 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_93 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+                elif LA90 == 28 or (80 <= LA90 <= 89):
+ LA90_94 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_95 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_96 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_97 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_98 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_99 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_100 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_101 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_102 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_103 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_104 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_105 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_106 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_107 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_108 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_111 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_112 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_113 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_114 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_115 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_116 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_117 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_118 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_119 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_120 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_121 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_122 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_123 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_124 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_125 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_126 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_127 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_128 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_129 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_130 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_131 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+                elif LA90 == 28 or (80 <= LA90 <= 89):
+ LA90_134 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_135 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_136 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_137 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_138 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_139 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_140 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_141 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+                elif LA90 == 28 or (80 <= LA90 <= 89):
+ LA90_142 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_143 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_144 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_145 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_146 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_147 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_148 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_149 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_150 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_151 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_152 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_153 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_154 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_155 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_156 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_159 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_160 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_161 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_162 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_163 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_164 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_165 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_166 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_167 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_168 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_169 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_170 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_171 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_172 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_173 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_174 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_175 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_176 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_177 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_178 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_179 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+                elif LA90 == 28 or (80 <= LA90 <= 89):
+ LA90_181 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_183 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 64:
+ LA90_184 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_185 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_186 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_187 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_188 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_189 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_190 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+                elif LA90 == 28 or (80 <= LA90 <= 89):
+ LA90_191 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_192 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_193 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_194 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_195 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_196 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_197 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_198 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_199 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_200 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_201 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_202 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_203 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_204 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_205 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_206 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_209 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_210 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_211 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_212 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_213 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_214 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_215 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
+ LA90_216 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_217 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_218 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_219 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_220 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_221 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_222 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_223 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_224 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_225 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_226 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_227 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_228 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_229 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_230 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == 62:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_233 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_234 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_235 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_236 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_237 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_238 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_239 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_240 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_241 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_242 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_243 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_244 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61:
+ LA90_245 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 34:
+ LA90_246 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 35:
+ LA90_247 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 36:
+ LA90_248 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 37:
+ LA90_249 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 38:
+ LA90_250 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 39:
+ LA90_251 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 40:
+ LA90_252 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 41:
+ LA90_253 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 42:
+ LA90_254 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 45 or LA90 == 46:
+ LA90_255 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 48:
+ LA90_256 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_257 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_258 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_259 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_260 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_261 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_262 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_263 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_264 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_265 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_266 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_267 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_268 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_269 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_270 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_271 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_272 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_273 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_274 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_275 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_276 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_277 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_278 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_279 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_280 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90 = self.input.LA(2)
+ if LA90 == 62:
+ LA90_281 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == IDENTIFIER:
+ LA90_282 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_283 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_284 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_285 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_286 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_287 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_288 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_289 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_290 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_291 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_292 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90 = self.input.LA(2)
+ if LA90 == 62:
+ LA90_293 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == IDENTIFIER:
+ LA90_294 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_295 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_296 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_297 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_298 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_299 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_300 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_301 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_302 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_303 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_304 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ if alt90 == 1:
+ # C.g:0:0: statement_list
+ self.following.append(self.FOLLOW_statement_list_in_macro_statement2170)
+ self.statement_list()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:544:49: ( expression )?
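+            # An optional expression may follow the statement_list; token
+            # type 63, matched right after this decision, closes the macro
+            # statement.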
+ alt91 = 2
+ LA91_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA91_0 <= FLOATING_POINT_LITERAL) or LA91_0 == 62 or LA91_0 == 66 or (68 <= LA91_0 <= 69) or (72 <= LA91_0 <= 74) or (77 <= LA91_0 <= 79)) :
+ alt91 = 1
+ if alt91 == 1:
+ # C.g:0:0: expression
+ self.following.append(self.FOLLOW_expression_in_macro_statement2173)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 63, self.FOLLOW_63_in_macro_statement2176)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 64, macro_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end macro_statement
+
+
+ # $ANTLR start labeled_statement
+ # C.g:547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );
+ def labeled_statement(self, ):
+
+ labeled_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65):
+ return
+
+ # C.g:548:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement )
+ alt92 = 3
+ LA92 = self.input.LA(1)
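+            # Token type 106 is 'case' and 107 is 'default' (see the rule
+            # comment above); a plain IDENTIFIER introduces a goto label.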
+ if LA92 == IDENTIFIER:
+ alt92 = 1
+ elif LA92 == 106:
+ alt92 = 2
+ elif LA92 == 107:
+ alt92 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input)
+
+ raise nvae
+
+ if alt92 == 1:
+ # C.g:548:4: IDENTIFIER ':' statement
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_labeled_statement2188)
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2190)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_labeled_statement2192)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt92 == 2:
+ # C.g:549:4: 'case' constant_expression ':' statement
+ self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2197)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2199)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2201)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_labeled_statement2203)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt92 == 3:
+ # C.g:550:4: 'default' ':' statement
+ self.match(self.input, 107, self.FOLLOW_107_in_labeled_statement2208)
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2210)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_labeled_statement2212)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 65, labeled_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end labeled_statement
+
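+    # Rule return object for compound_statement: the generated code records
+    # the first and last tokens of the matched '{' ... '}' region in
+    # retval.start and retval.stop.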
+ class compound_statement_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start compound_statement
+ # C.g:553:1: compound_statement : '{' ( declaration )* ( statement_list )? '}' ;
+ def compound_statement(self, ):
+
+ retval = self.compound_statement_return()
+ retval.start = self.input.LT(1)
+ compound_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 66):
+ return retval
+
+ # C.g:554:2: ( '{' ( declaration )* ( statement_list )? '}' )
+ # C.g:554:4: '{' ( declaration )* ( statement_list )? '}'
+ self.match(self.input, 43, self.FOLLOW_43_in_compound_statement2223)
+ if self.failed:
+ return retval
+ # C.g:554:8: ( declaration )*
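+            # ( declaration )* loop: for each candidate token the generated
+            # code looks ahead up to three tokens and runs the syntactic
+            # predicate synpred186 to tell a declaration apart from the
+            # statement_list that may follow it.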
+ while True: #loop93
+ alt93 = 2
+ LA93 = self.input.LA(1)
+ if LA93 == IDENTIFIER:
+ LA93 = self.input.LA(2)
+ if LA93 == 62:
+ LA93_44 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_47 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 66:
+ LA93_48 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_49 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_50 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_51 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_52 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_53 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_54 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_55 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_56 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_57 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_58 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_59 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_60 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_61 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_62 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_63 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_64 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_65 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 26:
+ LA93 = self.input.LA(2)
+ if LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_86 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_87 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_88 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_89 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_90 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_91 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_92 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_93 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_94 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_95 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_96 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_97 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_98 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_99 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 66:
+ LA93_100 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_101 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_102 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_103 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_104 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_105 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_106 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_107 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_108 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_109 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_110 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_111 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_112 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_113 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_114 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_115 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_116 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_117 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_118 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_119 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_120 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_121 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_122 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_123 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_124 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 34:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_125 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_126 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_127 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_128 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_129 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_130 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_131 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_132 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_133 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_134 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_135 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_136 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_137 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_138 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_139 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_140 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_141 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_142 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_143 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_144 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 35:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_145 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_146 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_147 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_148 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_149 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_150 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_151 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_152 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_153 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_154 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_155 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_156 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_157 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_158 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_159 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_160 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_161 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_162 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_163 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_164 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 36:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_165 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_166 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_167 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_168 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_169 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_170 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_171 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_172 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_173 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_174 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_175 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_176 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_177 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_178 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_179 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_180 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_181 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_182 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_183 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_184 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 37:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_185 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_186 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_187 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_188 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_189 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_190 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_191 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_192 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_193 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_194 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_195 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_196 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_197 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_198 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_199 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_200 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_201 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_202 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_203 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_204 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 38:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_205 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_206 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_207 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_208 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_209 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_210 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_211 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_212 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_213 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_214 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_215 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_216 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_217 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_218 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_219 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_220 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_221 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_222 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_223 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_224 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 39:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_225 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_226 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_227 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_228 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_229 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_230 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_231 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_232 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_233 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_234 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_235 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_236 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_237 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_238 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_239 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_240 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_241 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_242 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_243 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_244 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 40:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_245 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_246 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_247 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_248 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_249 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_250 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_251 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_252 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_253 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_254 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_255 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_256 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_257 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_258 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_259 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_260 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_261 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_262 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_263 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_264 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 41:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_265 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_266 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_267 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_268 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_269 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_270 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_271 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_272 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_273 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_274 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_275 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_276 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_277 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_278 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_279 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_280 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_281 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_282 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_283 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_284 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 42:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_285 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_286 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_287 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_288 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_289 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_290 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_291 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_292 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_293 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_294 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_295 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_296 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_297 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_298 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_299 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_300 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_301 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_302 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_303 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_304 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_40 = self.input.LA(2)
+
+ if (LA93_40 == IDENTIFIER) :
+ LA93_305 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif (LA93_40 == 43) :
+ LA93_306 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+
+ elif LA93 == 48:
+ LA93_41 = self.input.LA(2)
+
+ if (LA93_41 == 43) :
+ LA93_307 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif (LA93_41 == IDENTIFIER) :
+ LA93_308 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 58 or LA93 == 59 or LA93 == 60 or LA93 == 61:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_309 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_310 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_311 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_312 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_313 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_314 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_315 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_316 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_317 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_318 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_319 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_320 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_321 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_322 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_323 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_324 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_325 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_326 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_327 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_328 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+
+ if alt93 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_compound_statement2225)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop93
+
+
+ # C.g:554:21: ( statement_list )?
+ alt94 = 2
+ LA94_0 = self.input.LA(1)
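+            # A statement_list is matched only when LA(1) is a token that can
+            # begin a statement; otherwise control falls through to match the
+            # closing '}' (token type 44).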
+
+ if ((IDENTIFIER <= LA94_0 <= FLOATING_POINT_LITERAL) or (25 <= LA94_0 <= 26) or (29 <= LA94_0 <= 43) or (45 <= LA94_0 <= 46) or (48 <= LA94_0 <= 62) or LA94_0 == 66 or (68 <= LA94_0 <= 69) or (72 <= LA94_0 <= 74) or (77 <= LA94_0 <= 79) or (103 <= LA94_0 <= 108) or (110 <= LA94_0 <= 117)) :
+ alt94 = 1
+ if alt94 == 1:
+ # C.g:0:0: statement_list
+ self.following.append(self.FOLLOW_statement_list_in_compound_statement2228)
+ self.statement_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_compound_statement2231)
+ if self.failed:
+ return retval
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 66, compound_statement_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end compound_statement
+
+
+ # $ANTLR start statement_list
+ # C.g:557:1: statement_list : ( statement )+ ;
+ def statement_list(self, ):
+
+ statement_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67):
+ return
+
+ # C.g:558:2: ( ( statement )+ )
+ # C.g:558:4: ( statement )+
+ # C.g:558:4: ( statement )+
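+            # ( statement )+ loop: synpred188 is the syntactic predicate that
+            # decides, from up to three tokens of lookahead, whether another
+            # statement begins here; the grammar requires at least one.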
+ cnt95 = 0
+ while True: #loop95
+ alt95 = 2
+ LA95 = self.input.LA(1)
+ if LA95 == IDENTIFIER:
+ LA95 = self.input.LA(2)
+ if LA95 == 62:
+ LA95_46 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 45 or LA95 == 46 or LA95 == 47 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
+ alt95 = 1
+ elif LA95 == STRING_LITERAL:
+ LA95_48 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == IDENTIFIER:
+ LA95_49 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 64:
+ LA95_50 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_51 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_52 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_53 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_54 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_55 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_56 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_57 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_58 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_59 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_60 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_61 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_62 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_63 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_64 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_65 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_66 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_67 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_68 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_69 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_88 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_89 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_90 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_91 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_92 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_93 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_94 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_95 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_96 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_97 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_98 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_99 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_100 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_101 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_102 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_103 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_104 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_105 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_106 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_107 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_108 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_109 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_110 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_113 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_114 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_115 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_116 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_117 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_118 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_119 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_120 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_121 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_122 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_123 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_124 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_125 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_126 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_127 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_128 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_129 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_130 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_131 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_132 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_133 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_135 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_137 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_138 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_139 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_140 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_141 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_142 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_143 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_144 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_145 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_146 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_147 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_148 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_149 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_150 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_151 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_152 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_153 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_154 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_155 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_156 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_157 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_158 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_161 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_162 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_163 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_164 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_165 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_166 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_167 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_168 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_169 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_170 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_171 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_172 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_173 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_174 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_175 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_176 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_177 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_178 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_179 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_180 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_181 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_182 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == STRING_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_185 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 64:
+ LA95_186 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_187 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_188 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_189 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_190 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_191 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_192 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_193 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_194 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_195 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_196 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_197 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_198 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_199 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_200 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_201 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_202 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_203 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_204 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_205 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_206 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+ elif LA95 == STRING_LITERAL:
+ LA95_208 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_209 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_211 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_212 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_213 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_214 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_215 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_216 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_217 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_218 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_219 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_220 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_221 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_222 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_223 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_224 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_225 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_226 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_227 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_228 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_229 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_230 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_231 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_234 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 62:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_235 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_236 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_237 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_238 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_239 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_240 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_241 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_242 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_243 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_244 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_245 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_246 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
+ LA95_247 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 34:
+ LA95_248 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 35:
+ LA95_249 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 36:
+ LA95_250 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 37:
+ LA95_251 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 38:
+ LA95_252 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 39:
+ LA95_253 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 40:
+ LA95_254 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 41:
+ LA95_255 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 42:
+ LA95_256 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 45 or LA95 == 46:
+ LA95_257 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 48:
+ LA95_258 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 72:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_259 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_260 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_261 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_262 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_263 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_264 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_265 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_266 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_267 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_268 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_269 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_270 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 73:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_271 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_272 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_273 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_274 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_275 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_276 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_277 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_278 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_279 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_280 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_281 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_282 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95 = self.input.LA(2)
+ if LA95 == 62:
+ LA95_283 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == IDENTIFIER:
+ LA95_284 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_285 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_286 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_287 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_288 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_289 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_290 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_291 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_292 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_293 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_294 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 74:
+ LA95 = self.input.LA(2)
+ if LA95 == 62:
+ LA95_295 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == IDENTIFIER:
+ LA95_296 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_297 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_298 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_299 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_300 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_301 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_302 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_303 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_304 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_305 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_306 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 25 or LA95 == 26 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 43 or LA95 == 45 or LA95 == 46 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61 or LA95 == 103 or LA95 == 104 or LA95 == 105 or LA95 == 106 or LA95 == 107 or LA95 == 108 or LA95 == 110 or LA95 == 111 or LA95 == 112 or LA95 == 113 or LA95 == 114 or LA95 == 115 or LA95 == 116 or LA95 == 117:
+ alt95 = 1
+
+ if alt95 == 1:
+ # C.g:0:0: statement
+ self.following.append(self.FOLLOW_statement_in_statement_list2242)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt95 >= 1:
+ break #loop95
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(95, self.input)
+ raise eee
+
+ cnt95 += 1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 67, statement_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end statement_list
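+    # Note (added explanation, not ANTLR output): the large LA95 cascade in
+    # statement_list above is generated lookahead for the ( statement )+ loop.
+    # Each branch peeks at up to three tokens via self.input.LA(1..3); wherever
+    # the fixed lookahead alone cannot decide whether another statement
+    # follows, it falls back to the syntactic predicate synpred188(), which
+    # speculatively parses a full `statement` and then rewinds the input.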
+
+ class expression_statement_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start expression_statement
+ # C.g:561:1: expression_statement : ( ';' | expression ';' );
+ def expression_statement(self, ):
+
+ retval = self.expression_statement_return()
+ retval.start = self.input.LT(1)
+ expression_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 68):
+ return retval
+
+ # C.g:562:2: ( ';' | expression ';' )
+ alt96 = 2
+ LA96_0 = self.input.LA(1)
+
+ if (LA96_0 == 25) :
+ alt96 = 1
+ elif ((IDENTIFIER <= LA96_0 <= FLOATING_POINT_LITERAL) or LA96_0 == 62 or LA96_0 == 66 or (68 <= LA96_0 <= 69) or (72 <= LA96_0 <= 74) or (77 <= LA96_0 <= 79)) :
+ alt96 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("561:1: expression_statement : ( ';' | expression ';' );", 96, 0, self.input)
+
+ raise nvae
+
+ if alt96 == 1:
+ # C.g:562:4: ';'
+ self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2254)
+ if self.failed:
+ return retval
+
+
+ elif alt96 == 2:
+ # C.g:563:4: expression ';'
+ self.following.append(self.FOLLOW_expression_in_expression_statement2259)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2261)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 68, expression_statement_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end expression_statement
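+    # Note (added explanation): expression_statement returns an
+    # expression_statement_return object whose start/stop tokens bracket the
+    # matched text; callers such as iteration_statement read e.start and
+    # e.stop to recover the exact source span of a condition expression.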
+
+
+ # $ANTLR start selection_statement
+ # C.g:566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );
+ def selection_statement(self, ):
+
+ selection_statement_StartIndex = self.input.index()
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
+ return
+
+ # C.g:567:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
+ alt98 = 2
+ LA98_0 = self.input.LA(1)
+
+ if (LA98_0 == 108) :
+ alt98 = 1
+ elif (LA98_0 == 110) :
+ alt98 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)
+
+ raise nvae
+
+ if alt98 == 1:
+ # C.g:567:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
+ self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2272)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2274)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_selection_statement2278)
+ e = self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2280)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+ self.following.append(self.FOLLOW_statement_in_selection_statement2284)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:567:167: ( options {k=1; backtrack=false; } : 'else' statement )?
+ alt97 = 2
+ LA97_0 = self.input.LA(1)
+
+ if (LA97_0 == 109) :
+ alt97 = 1
+ if alt97 == 1:
+ # C.g:567:200: 'else' statement
+ self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2299)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_selection_statement2301)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt98 == 2:
+ # C.g:568:4: 'switch' '(' expression ')' statement
+ self.match(self.input, 110, self.FOLLOW_110_in_selection_statement2308)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2310)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_selection_statement2312)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2314)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_selection_statement2316)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 69, selection_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end selection_statement
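+    # Note (added explanation): the optional subrule
+    # ( options {k=1; backtrack=false; } : 'else' statement )? resolves the
+    # classic dangling-else ambiguity greedily, binding each 'else'
+    # (token 109) to the nearest preceding 'if'.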
+
+
+ # $ANTLR start iteration_statement
+ # C.g:571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );
+ def iteration_statement(self, ):
+
+ iteration_statement_StartIndex = self.input.index()
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
+ return
+
+ # C.g:572:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
+ alt100 = 3
+ LA100 = self.input.LA(1)
+ if LA100 == 111:
+ alt100 = 1
+ elif LA100 == 112:
+ alt100 = 2
+ elif LA100 == 113:
+ alt100 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)
+
+ raise nvae
+
+ if alt100 == 1:
+ # C.g:572:4: 'while' '(' e= expression ')' statement
+ self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2327)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2329)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_iteration_statement2333)
+ e = self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2335)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_iteration_statement2337)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+ elif alt100 == 2:
+ # C.g:573:4: 'do' statement 'while' '(' e= expression ')' ';'
+ self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2344)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_iteration_statement2346)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2348)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2350)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_iteration_statement2354)
+ e = self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2356)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2358)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+ elif alt100 == 3:
+ # C.g:574:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
+ self.match(self.input, 113, self.FOLLOW_113_in_iteration_statement2365)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2367)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2369)
+ self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2373)
+ e = self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:574:58: ( expression )?
+ alt99 = 2
+ LA99_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA99_0 <= FLOATING_POINT_LITERAL) or LA99_0 == 62 or LA99_0 == 66 or (68 <= LA99_0 <= 69) or (72 <= LA99_0 <= 74) or (77 <= LA99_0 <= 79)) :
+ alt99 = 1
+ if alt99 == 1:
+ # C.g:0:0: expression
+ self.following.append(self.FOLLOW_expression_in_iteration_statement2375)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2378)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_iteration_statement2380)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 70, iteration_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end iteration_statement
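+    # Note (assumption, added): StorePredicateExpression is a user action from
+    # the C.g grammar, not ANTLR machinery; it appears to record the line,
+    # column, and text of each if/while/do/for condition, presumably so a
+    # later checking pass can inspect predicate expressions.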
+
+
+ # $ANTLR start jump_statement
+ # C.g:577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );
+ def jump_statement(self, ):
+
+ jump_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
+ return
+
+ # C.g:578:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
+ alt101 = 5
+ LA101 = self.input.LA(1)
+ if LA101 == 114:
+ alt101 = 1
+ elif LA101 == 115:
+ alt101 = 2
+ elif LA101 == 116:
+ alt101 = 3
+ elif LA101 == 117:
+ LA101_4 = self.input.LA(2)
+
+ if (LA101_4 == 25) :
+ alt101 = 4
+ elif ((IDENTIFIER <= LA101_4 <= FLOATING_POINT_LITERAL) or LA101_4 == 62 or LA101_4 == 66 or (68 <= LA101_4 <= 69) or (72 <= LA101_4 <= 74) or (77 <= LA101_4 <= 79)) :
+ alt101 = 5
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input)
+
+ raise nvae
+
+ if alt101 == 1:
+ # C.g:578:4: 'goto' IDENTIFIER ';'
+ self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2393)
+ if self.failed:
+ return
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2395)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2397)
+ if self.failed:
+ return
+
+
+ elif alt101 == 2:
+ # C.g:579:4: 'continue' ';'
+ self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2402)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2404)
+ if self.failed:
+ return
+
+
+ elif alt101 == 3:
+ # C.g:580:4: 'break' ';'
+ self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2409)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2411)
+ if self.failed:
+ return
+
+
+ elif alt101 == 4:
+ # C.g:581:4: 'return' ';'
+ self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2416)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2418)
+ if self.failed:
+ return
+
+
+ elif alt101 == 5:
+ # C.g:582:4: 'return' expression ';'
+ self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2423)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_jump_statement2425)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2427)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 71, jump_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end jump_statement
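+    # Note (added explanation): jump_statement needs a second lookahead token
+    # only for 'return' (token 117): LA(2) == 25 (';') selects the bare
+    # 'return ;' alternative, while anything that can start an expression
+    # selects 'return expression ;'.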
+
+ # $ANTLR start synpred2
+ def synpred2_fragment(self, ):
+ # C.g:119:6: ( declaration_specifiers )
+ # C.g:119:6: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred2100)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred2
+
+
+
+ # $ANTLR start synpred4
+ def synpred4_fragment(self, ):
+ # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )
+ # C.g:119:6: ( declaration_specifiers )? declarator ( declaration )* '{'
+ # C.g:119:6: ( declaration_specifiers )?
+ alt102 = 2
+ LA102 = self.input.LA(1)
+ if LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33 or LA102 == 34 or LA102 == 35 or LA102 == 36 or LA102 == 37 or LA102 == 38 or LA102 == 39 or LA102 == 40 or LA102 == 41 or LA102 == 42 or LA102 == 45 or LA102 == 46 or LA102 == 48 or LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
+ alt102 = 1
+ elif LA102 == IDENTIFIER:
+ LA102 = self.input.LA(2)
+ if LA102 == 62:
+ LA102_21 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33:
+ LA102_23 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 34:
+ LA102_24 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 35:
+ LA102_25 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 36:
+ LA102_26 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 37:
+ LA102_27 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 38:
+ LA102_28 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 39:
+ LA102_29 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 40:
+ LA102_30 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 41:
+ LA102_31 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 42:
+ LA102_32 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 45 or LA102 == 46:
+ LA102_33 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 48:
+ LA102_34 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == IDENTIFIER:
+ LA102_35 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 58:
+ LA102_36 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 66:
+ alt102 = 1
+ elif LA102 == 59:
+ LA102_39 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 60:
+ LA102_40 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
+ LA102_41 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 58:
+ LA102_14 = self.input.LA(2)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 59:
+ LA102_16 = self.input.LA(2)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 60:
+ LA102_17 = self.input.LA(2)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ if alt102 == 1:
+ # C.g:0:0: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred4100)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_declarator_in_synpred4103)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:119:41: ( declaration )*
+ while True: #loop103
+ alt103 = 2
+ LA103_0 = self.input.LA(1)
+
+ if (LA103_0 == IDENTIFIER or LA103_0 == 26 or (29 <= LA103_0 <= 42) or (45 <= LA103_0 <= 46) or (48 <= LA103_0 <= 61)) :
+ alt103 = 1
+
+
+ if alt103 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred4105)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop103
+
+
+ self.match(self.input, 43, self.FOLLOW_43_in_synpred4108)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred4
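+    # Note (added explanation): synpred4 is the lookahead predicate for a
+    # function-definition head: optional declaration_specifiers, a declarator,
+    # any K&R-style parameter declarations, then '{' (token 43). It succeeds
+    # only if that whole prefix parses, which lets the parser distinguish
+    # definitions from plain declarations.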
+
+
+
+ # $ANTLR start synpred5
+ def synpred5_fragment(self, ):
+ # C.g:120:4: ( declaration )
+ # C.g:120:4: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred5118)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred5
+
+
+
+ # $ANTLR start synpred7
+ def synpred7_fragment(self, ):
+ # C.g:146:6: ( declaration_specifiers )
+ # C.g:146:6: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred7157)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred7
+
+
+
+ # $ANTLR start synpred10
+ def synpred10_fragment(self, ):
+ # C.g:167:18: ( declaration_specifiers )
+ # C.g:167:18: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred10207)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred10
+
+
+
+ # $ANTLR start synpred14
+ def synpred14_fragment(self, ):
+ # C.g:184:7: ( type_specifier )
+ # C.g:184:7: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_synpred14272)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred14
+
+
+
+ # $ANTLR start synpred15
+ def synpred15_fragment(self, ):
+ # C.g:185:13: ( type_qualifier )
+ # C.g:185:13: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred15286)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred15
+
+
+
+ # $ANTLR start synpred33
+ def synpred33_fragment(self, ):
+ # C.g:225:16: ( type_qualifier )
+ # C.g:225:16: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred33444)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred33
+
+
+
+ # $ANTLR start synpred34
+ def synpred34_fragment(self, ):
+ # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )
+ # C.g:225:5: IDENTIFIER ( type_qualifier )* declarator
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34442)
+ if self.failed:
+ return
+ # C.g:225:16: ( type_qualifier )*
+ while True: #loop106
+ alt106 = 2
+ LA106 = self.input.LA(1)
+ if LA106 == 58:
+ LA106_2 = self.input.LA(2)
+
+ if (self.synpred33()) :
+ alt106 = 1
+
+
+ elif LA106 == 59:
+ LA106_3 = self.input.LA(2)
+
+ if (self.synpred33()) :
+ alt106 = 1
+
+
+ elif LA106 == 60:
+ LA106_4 = self.input.LA(2)
+
+ if (self.synpred33()) :
+ alt106 = 1
+
+
+ elif LA106 == 49 or LA106 == 50 or LA106 == 51 or LA106 == 52 or LA106 == 53 or LA106 == 54 or LA106 == 55 or LA106 == 56 or LA106 == 57 or LA106 == 61:
+ alt106 = 1
+
+ if alt106 == 1:
+ # C.g:0:0: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred34444)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop106
+
+
+ self.following.append(self.FOLLOW_declarator_in_synpred34447)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred34
+
+
+
+ # $ANTLR start synpred39
+ def synpred39_fragment(self, ):
+ # C.g:253:6: ( type_qualifier )
+ # C.g:253:6: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred39566)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred39
+
+
+
+ # $ANTLR start synpred40
+ def synpred40_fragment(self, ):
+ # C.g:253:23: ( type_specifier )
+ # C.g:253:23: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_synpred40570)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred40
+
+
+
+ # $ANTLR start synpred66
+ def synpred66_fragment(self, ):
+ # C.g:297:4: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator )
+ # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
+ # C.g:297:4: ( pointer )?
+ alt111 = 2
+ LA111_0 = self.input.LA(1)
+
+ if (LA111_0 == 66) :
+ alt111 = 1
+ if alt111 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_synpred66784)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:297:13: ( 'EFIAPI' )?
+ alt112 = 2
+ LA112_0 = self.input.LA(1)
+
+ if (LA112_0 == 58) :
+ alt112 = 1
+ if alt112 == 1:
+ # C.g:297:14: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_synpred66788)
+ if self.failed:
+ return
+
+
+
+ # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
+ alt113 = 2
+ LA113_0 = self.input.LA(1)
+
+ if (LA113_0 == 59) :
+ alt113 = 1
+ if alt113 == 1:
+ # C.g:297:26: 'EFI_BOOTSERVICE'
+ self.match(self.input, 59, self.FOLLOW_59_in_synpred66793)
+ if self.failed:
+ return
+
+
+
+ # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
+ alt114 = 2
+ LA114_0 = self.input.LA(1)
+
+ if (LA114_0 == 60) :
+ alt114 = 1
+ if alt114 == 1:
+ # C.g:297:47: 'EFI_RUNTIMESERVICE'
+ self.match(self.input, 60, self.FOLLOW_60_in_synpred66798)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_direct_declarator_in_synpred66802)
+ self.direct_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred66
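+    # Note (assumption, added): the 'EFIAPI' (58), 'EFI_BOOTSERVICE' (59) and
+    # 'EFI_RUNTIMESERVICE' (60) tokens tested in synpred66 are EDK2-specific
+    # annotation keywords that this C grammar accepts between the pointer and
+    # the direct declarator; they are not part of standard C.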
+
+
+
+ # $ANTLR start synpred67
+ def synpred67_fragment(self, ):
+ # C.g:303:15: ( declarator_suffix )
+ # C.g:303:15: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_synpred67821)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred67
+
+
+
+ # $ANTLR start synpred69
+ def synpred69_fragment(self, ):
+ # C.g:304:9: ( 'EFIAPI' )
+ # C.g:304:9: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_synpred69830)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred69
+
+
+
+ # $ANTLR start synpred70
+ def synpred70_fragment(self, ):
+ # C.g:304:35: ( declarator_suffix )
+ # C.g:304:35: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_synpred70838)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred70
+
+
+
+ # $ANTLR start synpred73
+ def synpred73_fragment(self, ):
+ # C.g:310:9: ( '(' parameter_type_list ')' )
+ # C.g:310:9: '(' parameter_type_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred73878)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_type_list_in_synpred73880)
+ self.parameter_type_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred73882)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred73
+
+
+
+ # $ANTLR start synpred74
+ def synpred74_fragment(self, ):
+ # C.g:311:9: ( '(' identifier_list ')' )
+ # C.g:311:9: '(' identifier_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred74892)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_identifier_list_in_synpred74894)
+ self.identifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred74896)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred74
+
+
+
+ # $ANTLR start synpred75
+ def synpred75_fragment(self, ):
+ # C.g:316:8: ( type_qualifier )
+ # C.g:316:8: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred75921)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred75
+
+
+
+ # $ANTLR start synpred76
+ def synpred76_fragment(self, ):
+ # C.g:316:24: ( pointer )
+ # C.g:316:24: pointer
+ self.following.append(self.FOLLOW_pointer_in_synpred76924)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred76
+
+
+
+ # $ANTLR start synpred77
+ def synpred77_fragment(self, ):
+ # C.g:316:4: ( '*' ( type_qualifier )+ ( pointer )? )
+ # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
+ self.match(self.input, 66, self.FOLLOW_66_in_synpred77919)
+ if self.failed:
+ return
+ # C.g:316:8: ( type_qualifier )+
+ cnt116 = 0
+ while True: #loop116
+ alt116 = 2
+ LA116_0 = self.input.LA(1)
+
+ if ((49 <= LA116_0 <= 61)) :
+ alt116 = 1
+
+
+ if alt116 == 1:
+ # C.g:0:0: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred77921)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt116 >= 1:
+ break #loop116
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(116, self.input)
+ raise eee
+
+ cnt116 += 1
+
+
+ # C.g:316:24: ( pointer )?
+ alt117 = 2
+ LA117_0 = self.input.LA(1)
+
+ if (LA117_0 == 66) :
+ alt117 = 1
+ if alt117 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_synpred77924)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ # $ANTLR end synpred77
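+    # Note (added explanation): synpred77 matches '*' followed by one or more
+    # type qualifiers (tokens 49-61, which in this grammar include EDK2
+    # keywords such as 'OPTIONAL') and an optional nested pointer, i.e. a
+    # qualified pointer declarator.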
+
+
+
+ # $ANTLR start synpred78
+ def synpred78_fragment(self, ):
+ # C.g:317:4: ( '*' pointer )
+ # C.g:317:4: '*' pointer
+ self.match(self.input, 66, self.FOLLOW_66_in_synpred78930)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_pointer_in_synpred78932)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred78
+
+
+
+ # $ANTLR start synpred81
+ def synpred81_fragment(self, ):
+ # C.g:326:32: ( 'OPTIONAL' )
+ # C.g:326:32: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_synpred81977)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred81
+
+
+
+ # $ANTLR start synpred82
+ def synpred82_fragment(self, ):
+ # C.g:326:27: ( ',' ( 'OPTIONAL' )? parameter_declaration )
+ # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
+ self.match(self.input, 27, self.FOLLOW_27_in_synpred82974)
+ if self.failed:
+ return
+ # C.g:326:31: ( 'OPTIONAL' )?
+ alt119 = 2
+ LA119_0 = self.input.LA(1)
+
+ if (LA119_0 == 53) :
+ LA119_1 = self.input.LA(2)
+
+ if (self.synpred81()) :
+ alt119 = 1
+ if alt119 == 1:
+ # C.g:326:32: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_synpred82977)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_parameter_declaration_in_synpred82981)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred82
+
+
+
+ # $ANTLR start synpred83
+ def synpred83_fragment(self, ):
+ # C.g:330:28: ( declarator )
+ # C.g:330:28: declarator
+ self.following.append(self.FOLLOW_declarator_in_synpred83997)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred83
+
+
+
+ # $ANTLR start synpred84
+ def synpred84_fragment(self, ):
+ # C.g:330:39: ( abstract_declarator )
+ # C.g:330:39: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred84999)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred84
+
+
+
+ # $ANTLR start synpred86
+ def synpred86_fragment(self, ):
+ # C.g:330:4: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? )
+ # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred86994)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:330:27: ( declarator | abstract_declarator )*
+ while True: #loop120
+ alt120 = 3
+ LA120 = self.input.LA(1)
+ if LA120 == 66:
+ LA120_3 = self.input.LA(2)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == IDENTIFIER or LA120 == 58 or LA120 == 59 or LA120 == 60:
+ alt120 = 1
+ elif LA120 == 62:
+ LA120 = self.input.LA(2)
+ if LA120 == 29 or LA120 == 30 or LA120 == 31 or LA120 == 32 or LA120 == 33 or LA120 == 34 or LA120 == 35 or LA120 == 36 or LA120 == 37 or LA120 == 38 or LA120 == 39 or LA120 == 40 or LA120 == 41 or LA120 == 42 or LA120 == 45 or LA120 == 46 or LA120 == 48 or LA120 == 49 or LA120 == 50 or LA120 == 51 or LA120 == 52 or LA120 == 53 or LA120 == 54 or LA120 == 55 or LA120 == 56 or LA120 == 57 or LA120 == 61 or LA120 == 63 or LA120 == 64:
+ alt120 = 2
+ elif LA120 == 58:
+ LA120_21 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 66:
+ LA120_22 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 59:
+ LA120_23 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 60:
+ LA120_24 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == IDENTIFIER:
+ LA120_25 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 62:
+ LA120_26 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+
+ elif LA120 == 64:
+ alt120 = 2
+
+ if alt120 == 1:
+ # C.g:330:28: declarator
+ self.following.append(self.FOLLOW_declarator_in_synpred86997)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt120 == 2:
+ # C.g:330:39: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred86999)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop120
+
+
+ # C.g:330:61: ( 'OPTIONAL' )?
+ alt121 = 2
+ LA121_0 = self.input.LA(1)
+
+ if (LA121_0 == 53) :
+ alt121 = 1
+ if alt121 == 1:
+ # C.g:330:62: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_synpred861004)
+ if self.failed:
+ return
+
+
+
+
+
+ # $ANTLR end synpred86
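+    # Note (added explanation): synpred86 disambiguates a parameter_declaration
+    # alternative by trying declaration_specifiers followed by any mix of
+    # concrete and abstract declarators; the nested synpred83/synpred84 calls
+    # decide, token by token, whether '(' or '*' begins a named or an abstract
+    # declarator.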
+
+
+
+ # $ANTLR start synpred90
+ def synpred90_fragment(self, ):
+ # C.g:341:4: ( specifier_qualifier_list ( abstract_declarator )? )
+ # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
+ self.following.append(self.FOLLOW_specifier_qualifier_list_in_synpred901046)
+ self.specifier_qualifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:341:29: ( abstract_declarator )?
+ alt122 = 2
+ LA122_0 = self.input.LA(1)
+
+ if (LA122_0 == 62 or LA122_0 == 64 or LA122_0 == 66) :
+ alt122 = 1
+ if alt122 == 1:
+ # C.g:0:0: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred901048)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ # $ANTLR end synpred90
+
+
+
+ # $ANTLR start synpred91
+ def synpred91_fragment(self, ):
+ # C.g:346:12: ( direct_abstract_declarator )
+ # C.g:346:12: direct_abstract_declarator
+ self.following.append(self.FOLLOW_direct_abstract_declarator_in_synpred911067)
+ self.direct_abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred91
+
+
+
+ # $ANTLR start synpred93
+ def synpred93_fragment(self, ):
+ # C.g:351:6: ( '(' abstract_declarator ')' )
+ # C.g:351:6: '(' abstract_declarator ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred931086)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred931088)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred931090)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred93
+
+
+
+ # $ANTLR start synpred94
+ def synpred94_fragment(self, ):
+ # C.g:351:65: ( abstract_declarator_suffix )
+ # C.g:351:65: abstract_declarator_suffix
+ self.following.append(self.FOLLOW_abstract_declarator_suffix_in_synpred941098)
+ self.abstract_declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred94
+
+
+
+ # $ANTLR start synpred109
+ def synpred109_fragment(self, ):
+ # C.g:386:4: ( '(' type_name ')' cast_expression )
+ # C.g:386:4: '(' type_name ')' cast_expression
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred1091282)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_type_name_in_synpred1091284)
+ self.type_name()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred1091286)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_synpred1091288)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred109
+
+
+
+ # $ANTLR start synpred114
+ def synpred114_fragment(self, ):
+ # C.g:395:4: ( 'sizeof' unary_expression )
+ # C.g:395:4: 'sizeof' unary_expression
+ self.match(self.input, 74, self.FOLLOW_74_in_synpred1141330)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_synpred1141332)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred114
+
+
+
+ # $ANTLR start synpred117
+ def synpred117_fragment(self, ):
+ # C.g:409:13: ( '(' argument_expression_list ')' )
+ # C.g:409:13: '(' argument_expression_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred1171420)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_argument_expression_list_in_synpred1171424)
+ self.argument_expression_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred1171428)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred117
+
+
+
+ # $ANTLR start synpred118
+ def synpred118_fragment(self, ):
+ # C.g:410:13: ( '(' macro_parameter_list ')' )
+ # C.g:410:13: '(' macro_parameter_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred1181444)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1181446)
+ self.macro_parameter_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred1181448)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred118
+
+
+
+ # $ANTLR start synpred120
+ def synpred120_fragment(self, ):
+ # C.g:412:13: ( '*' IDENTIFIER )
+ # C.g:412:13: '*' IDENTIFIER
+ self.match(self.input, 66, self.FOLLOW_66_in_synpred1201482)
+ if self.failed:
+ return
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1201486)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred120
+
+
+
+ # $ANTLR start synpred137
+ def synpred137_fragment(self, ):
+ # C.g:443:20: ( STRING_LITERAL )
+ # C.g:443:20: STRING_LITERAL
+ self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371683)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred137
+
+
+
+ # $ANTLR start synpred138
+ def synpred138_fragment(self, ):
+ # C.g:443:8: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )
+ # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
+ # C.g:443:8: ( IDENTIFIER )*
+ while True: #loop125
+ alt125 = 2
+ LA125_0 = self.input.LA(1)
+
+ if (LA125_0 == IDENTIFIER) :
+ alt125 = 1
+
+
+ if alt125 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1381680)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop125
+
+
+ # C.g:443:20: ( STRING_LITERAL )+
+ cnt126 = 0
+ while True: #loop126
+ alt126 = 2
+ LA126_0 = self.input.LA(1)
+
+ if (LA126_0 == STRING_LITERAL) :
+ alt126 = 1
+
+
+ if alt126 == 1:
+ # C.g:0:0: STRING_LITERAL
+ self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1381683)
+ if self.failed:
+ return
+
+
+ else:
+ if cnt126 >= 1:
+ break #loop126
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(126, self.input)
+ raise eee
+
+ cnt126 += 1
+
+
+
+
+ # $ANTLR end synpred138
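+    # Note (added explanation): synpred138 recognizes the sequence
+    # ( IDENTIFIER )* ( STRING_LITERAL )+, apparently to accept adjacent
+    # string literals (optionally preceded by macro-like identifiers) that C
+    # concatenates into a single literal.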
+
+
+
+ # $ANTLR start synpred142
+ def synpred142_fragment(self, ):
+ # C.g:458:4: ( lvalue assignment_operator assignment_expression )
+ # C.g:458:4: lvalue assignment_operator assignment_expression
+ self.following.append(self.FOLLOW_lvalue_in_synpred1421744)
+ self.lvalue()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_operator_in_synpred1421746)
+ self.assignment_operator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_expression_in_synpred1421748)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred142
+
+
+
+ # $ANTLR start synpred169
+ def synpred169_fragment(self, ):
+ # C.g:520:4: ( expression_statement )
+ # C.g:520:4: expression_statement
+ self.following.append(self.FOLLOW_expression_statement_in_synpred1692035)
+ self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred169
+
+
+
+ # $ANTLR start synpred173
+ def synpred173_fragment(self, ):
+ # C.g:524:4: ( macro_statement )
+ # C.g:524:4: macro_statement
+ self.following.append(self.FOLLOW_macro_statement_in_synpred1732055)
+ self.macro_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred173
+
+
+
+ # $ANTLR start synpred174
+ def synpred174_fragment(self, ):
+ # C.g:525:4: ( asm2_statement )
+ # C.g:525:4: asm2_statement
+ self.following.append(self.FOLLOW_asm2_statement_in_synpred1742060)
+ self.asm2_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred174
+
+
+
+ # $ANTLR start synpred181
+ def synpred181_fragment(self, ):
+ # C.g:544:19: ( declaration )
+ # C.g:544:19: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred1812166)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred181
+
+
+
+ # $ANTLR start synpred182
+ def synpred182_fragment(self, ):
+ # C.g:544:33: ( statement_list )
+ # C.g:544:33: statement_list
+ self.following.append(self.FOLLOW_statement_list_in_synpred1822170)
+ self.statement_list()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred182
+
+
+
+ # $ANTLR start synpred186
+ def synpred186_fragment(self, ):
+ # C.g:554:8: ( declaration )
+ # C.g:554:8: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred1862225)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred186
+
+
+
+ # $ANTLR start synpred188
+ def synpred188_fragment(self, ):
+ # C.g:558:4: ( statement )
+ # C.g:558:4: statement
+ self.following.append(self.FOLLOW_statement_in_synpred1882242)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred188
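+    # Note (added explanation): each synpredNN() wrapper below implements
+    # ANTLR 3 backtracking: it increments self.backtracking, marks the input
+    # stream, runs the corresponding *_fragment() speculatively, records
+    # success as `not self.failed`, rewinds the input, and clears the failed
+    # flag so the main parse can continue from the same position.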
+
+
+
+ def synpred69(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred69_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred81(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred81_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred82(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred82_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred66(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred66_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred83(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred83_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred84(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred84_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred67(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred67_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred86(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred86_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred120(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred120_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred40(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred40_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred142(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred142_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred182(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred182_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred109(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred109_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred181(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred181_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred186(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred186_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred188(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred188_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred169(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred169_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred117(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred117_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred70(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred70_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred118(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred118_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred34(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred34_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred33(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred33_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred94(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred94_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred39(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred39_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred74(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred74_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred114(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred114_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred93(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred93_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred75(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred75_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred137(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred137_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred90(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred90_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred138(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred138_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred91(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred91_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred73(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred73_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred5(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred5_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred78(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred78_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred7(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred7_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred76(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred76_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred77(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred77_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred2(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred2_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred4(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred4_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred174(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred174_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred173(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred173_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred14(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred14_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred15(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred15_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred10(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred10_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+
+
+
+
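+ # The FOLLOW_* bitsets below are emitted by the ANTLR tool. Each frozenset
+ # holds the token types that may follow the named subrule at that call
+ # site; the rule methods push them onto self.following before invoking a
+ # subrule (as the fragments above do), and the runtime consults that stack
+ # during error recovery.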
+ FOLLOW_external_declaration_in_translation_unit74 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_function_definition_in_external_declaration113 = frozenset([1])
+ FOLLOW_declaration_in_external_declaration118 = frozenset([1])
+ FOLLOW_macro_statement_in_external_declaration123 = frozenset([1, 25])
+ FOLLOW_25_in_external_declaration126 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_function_definition157 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_declarator_in_function_definition160 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_declaration_in_function_definition166 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_compound_statement_in_function_definition171 = frozenset([1])
+ FOLLOW_compound_statement_in_function_definition180 = frozenset([1])
+ FOLLOW_26_in_declaration203 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_declaration_specifiers_in_declaration207 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_init_declarator_list_in_declaration216 = frozenset([25])
+ FOLLOW_25_in_declaration220 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_declaration234 = frozenset([4, 25, 58, 59, 60, 62, 66])
+ FOLLOW_init_declarator_list_in_declaration238 = frozenset([25])
+ FOLLOW_25_in_declaration243 = frozenset([1])
+ FOLLOW_storage_class_specifier_in_declaration_specifiers264 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_specifier_in_declaration_specifiers272 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_qualifier_in_declaration_specifiers286 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_init_declarator_in_init_declarator_list308 = frozenset([1, 27])
+ FOLLOW_27_in_init_declarator_list311 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_init_declarator_in_init_declarator_list313 = frozenset([1, 27])
+ FOLLOW_declarator_in_init_declarator326 = frozenset([1, 28])
+ FOLLOW_28_in_init_declarator329 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_initializer_in_init_declarator331 = frozenset([1])
+ FOLLOW_set_in_storage_class_specifier0 = frozenset([1])
+ FOLLOW_34_in_type_specifier376 = frozenset([1])
+ FOLLOW_35_in_type_specifier381 = frozenset([1])
+ FOLLOW_36_in_type_specifier386 = frozenset([1])
+ FOLLOW_37_in_type_specifier391 = frozenset([1])
+ FOLLOW_38_in_type_specifier396 = frozenset([1])
+ FOLLOW_39_in_type_specifier401 = frozenset([1])
+ FOLLOW_40_in_type_specifier406 = frozenset([1])
+ FOLLOW_41_in_type_specifier411 = frozenset([1])
+ FOLLOW_42_in_type_specifier416 = frozenset([1])
+ FOLLOW_struct_or_union_specifier_in_type_specifier423 = frozenset([1])
+ FOLLOW_enum_specifier_in_type_specifier433 = frozenset([1])
+ FOLLOW_type_id_in_type_specifier451 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_type_id467 = frozenset([1])
+ FOLLOW_struct_or_union_in_struct_or_union_specifier494 = frozenset([4, 43])
+ FOLLOW_IDENTIFIER_in_struct_or_union_specifier496 = frozenset([43])
+ FOLLOW_43_in_struct_or_union_specifier499 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_struct_declaration_list_in_struct_or_union_specifier501 = frozenset([44])
+ FOLLOW_44_in_struct_or_union_specifier503 = frozenset([1])
+ FOLLOW_struct_or_union_in_struct_or_union_specifier508 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_struct_or_union_specifier510 = frozenset([1])
+ FOLLOW_set_in_struct_or_union0 = frozenset([1])
+ FOLLOW_struct_declaration_in_struct_declaration_list537 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_specifier_qualifier_list_in_struct_declaration549 = frozenset([4, 47, 58, 59, 60, 62, 66])
+ FOLLOW_struct_declarator_list_in_struct_declaration551 = frozenset([25])
+ FOLLOW_25_in_struct_declaration553 = frozenset([1])
+ FOLLOW_type_qualifier_in_specifier_qualifier_list566 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_specifier_in_specifier_qualifier_list570 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_struct_declarator_in_struct_declarator_list584 = frozenset([1, 27])
+ FOLLOW_27_in_struct_declarator_list587 = frozenset([4, 47, 58, 59, 60, 62, 66])
+ FOLLOW_struct_declarator_in_struct_declarator_list589 = frozenset([1, 27])
+ FOLLOW_declarator_in_struct_declarator602 = frozenset([1, 47])
+ FOLLOW_47_in_struct_declarator605 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_struct_declarator607 = frozenset([1])
+ FOLLOW_47_in_struct_declarator614 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_struct_declarator616 = frozenset([1])
+ FOLLOW_48_in_enum_specifier634 = frozenset([43])
+ FOLLOW_43_in_enum_specifier636 = frozenset([4])
+ FOLLOW_enumerator_list_in_enum_specifier638 = frozenset([27, 44])
+ FOLLOW_27_in_enum_specifier640 = frozenset([44])
+ FOLLOW_44_in_enum_specifier643 = frozenset([1])
+ FOLLOW_48_in_enum_specifier648 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_enum_specifier650 = frozenset([43])
+ FOLLOW_43_in_enum_specifier652 = frozenset([4])
+ FOLLOW_enumerator_list_in_enum_specifier654 = frozenset([27, 44])
+ FOLLOW_27_in_enum_specifier656 = frozenset([44])
+ FOLLOW_44_in_enum_specifier659 = frozenset([1])
+ FOLLOW_48_in_enum_specifier664 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_enum_specifier666 = frozenset([1])
+ FOLLOW_enumerator_in_enumerator_list677 = frozenset([1, 27])
+ FOLLOW_27_in_enumerator_list680 = frozenset([4])
+ FOLLOW_enumerator_in_enumerator_list682 = frozenset([1, 27])
+ FOLLOW_IDENTIFIER_in_enumerator695 = frozenset([1, 28])
+ FOLLOW_28_in_enumerator698 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_enumerator700 = frozenset([1])
+ FOLLOW_set_in_type_qualifier0 = frozenset([1])
+ FOLLOW_pointer_in_declarator784 = frozenset([4, 58, 59, 60, 62])
+ FOLLOW_58_in_declarator788 = frozenset([4, 59, 60, 62])
+ FOLLOW_59_in_declarator793 = frozenset([4, 60, 62])
+ FOLLOW_60_in_declarator798 = frozenset([4, 62])
+ FOLLOW_direct_declarator_in_declarator802 = frozenset([1])
+ FOLLOW_pointer_in_declarator808 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_direct_declarator819 = frozenset([1, 62, 64])
+ FOLLOW_declarator_suffix_in_direct_declarator821 = frozenset([1, 62, 64])
+ FOLLOW_62_in_direct_declarator827 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_58_in_direct_declarator830 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_declarator_in_direct_declarator834 = frozenset([63])
+ FOLLOW_63_in_direct_declarator836 = frozenset([62, 64])
+ FOLLOW_declarator_suffix_in_direct_declarator838 = frozenset([1, 62, 64])
+ FOLLOW_64_in_declarator_suffix852 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_declarator_suffix854 = frozenset([65])
+ FOLLOW_65_in_declarator_suffix856 = frozenset([1])
+ FOLLOW_64_in_declarator_suffix866 = frozenset([65])
+ FOLLOW_65_in_declarator_suffix868 = frozenset([1])
+ FOLLOW_62_in_declarator_suffix878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_type_list_in_declarator_suffix880 = frozenset([63])
+ FOLLOW_63_in_declarator_suffix882 = frozenset([1])
+ FOLLOW_62_in_declarator_suffix892 = frozenset([4])
+ FOLLOW_identifier_list_in_declarator_suffix894 = frozenset([63])
+ FOLLOW_63_in_declarator_suffix896 = frozenset([1])
+ FOLLOW_62_in_declarator_suffix906 = frozenset([63])
+ FOLLOW_63_in_declarator_suffix908 = frozenset([1])
+ FOLLOW_66_in_pointer919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_qualifier_in_pointer921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_pointer_in_pointer924 = frozenset([1])
+ FOLLOW_66_in_pointer930 = frozenset([66])
+ FOLLOW_pointer_in_pointer932 = frozenset([1])
+ FOLLOW_66_in_pointer937 = frozenset([1])
+ FOLLOW_parameter_list_in_parameter_type_list948 = frozenset([1, 27])
+ FOLLOW_27_in_parameter_type_list951 = frozenset([53, 67])
+ FOLLOW_53_in_parameter_type_list954 = frozenset([67])
+ FOLLOW_67_in_parameter_type_list958 = frozenset([1])
+ FOLLOW_parameter_declaration_in_parameter_list971 = frozenset([1, 27])
+ FOLLOW_27_in_parameter_list974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_53_in_parameter_list977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_declaration_in_parameter_list981 = frozenset([1, 27])
+ FOLLOW_declaration_specifiers_in_parameter_declaration994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_declarator_in_parameter_declaration997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_parameter_declaration999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_53_in_parameter_declaration1004 = frozenset([1])
+ FOLLOW_pointer_in_parameter_declaration1013 = frozenset([4, 66])
+ FOLLOW_IDENTIFIER_in_parameter_declaration1016 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_identifier_list1027 = frozenset([1, 27])
+ FOLLOW_27_in_identifier_list1031 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_identifier_list1033 = frozenset([1, 27])
+ FOLLOW_specifier_qualifier_list_in_type_name1046 = frozenset([1, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_type_name1048 = frozenset([1])
+ FOLLOW_type_id_in_type_name1054 = frozenset([1])
+ FOLLOW_pointer_in_abstract_declarator1065 = frozenset([1, 62, 64])
+ FOLLOW_direct_abstract_declarator_in_abstract_declarator1067 = frozenset([1])
+ FOLLOW_direct_abstract_declarator_in_abstract_declarator1073 = frozenset([1])
+ FOLLOW_62_in_direct_abstract_declarator1086 = frozenset([62, 64, 66])
+ FOLLOW_abstract_declarator_in_direct_abstract_declarator1088 = frozenset([63])
+ FOLLOW_63_in_direct_abstract_declarator1090 = frozenset([1, 62, 64])
+ FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094 = frozenset([1, 62, 64])
+ FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098 = frozenset([1, 62, 64])
+ FOLLOW_64_in_abstract_declarator_suffix1110 = frozenset([65])
+ FOLLOW_65_in_abstract_declarator_suffix1112 = frozenset([1])
+ FOLLOW_64_in_abstract_declarator_suffix1117 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_abstract_declarator_suffix1119 = frozenset([65])
+ FOLLOW_65_in_abstract_declarator_suffix1121 = frozenset([1])
+ FOLLOW_62_in_abstract_declarator_suffix1126 = frozenset([63])
+ FOLLOW_63_in_abstract_declarator_suffix1128 = frozenset([1])
+ FOLLOW_62_in_abstract_declarator_suffix1133 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135 = frozenset([63])
+ FOLLOW_63_in_abstract_declarator_suffix1137 = frozenset([1])
+ FOLLOW_assignment_expression_in_initializer1150 = frozenset([1])
+ FOLLOW_43_in_initializer1155 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_initializer_list_in_initializer1157 = frozenset([27, 44])
+ FOLLOW_27_in_initializer1159 = frozenset([44])
+ FOLLOW_44_in_initializer1162 = frozenset([1])
+ FOLLOW_initializer_in_initializer_list1173 = frozenset([1, 27])
+ FOLLOW_27_in_initializer_list1176 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_initializer_in_initializer_list1178 = frozenset([1, 27])
+ FOLLOW_assignment_expression_in_argument_expression_list1196 = frozenset([1, 27, 53])
+ FOLLOW_53_in_argument_expression_list1199 = frozenset([1, 27])
+ FOLLOW_27_in_argument_expression_list1204 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_argument_expression_list1206 = frozenset([1, 27, 53])
+ FOLLOW_53_in_argument_expression_list1209 = frozenset([1, 27])
+ FOLLOW_multiplicative_expression_in_additive_expression1225 = frozenset([1, 68, 69])
+ FOLLOW_68_in_additive_expression1229 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_multiplicative_expression_in_additive_expression1231 = frozenset([1, 68, 69])
+ FOLLOW_69_in_additive_expression1235 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_multiplicative_expression_in_additive_expression1237 = frozenset([1, 68, 69])
+ FOLLOW_cast_expression_in_multiplicative_expression1251 = frozenset([1, 66, 70, 71])
+ FOLLOW_66_in_multiplicative_expression1255 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_multiplicative_expression1257 = frozenset([1, 66, 70, 71])
+ FOLLOW_70_in_multiplicative_expression1261 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_multiplicative_expression1263 = frozenset([1, 66, 70, 71])
+ FOLLOW_71_in_multiplicative_expression1267 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_multiplicative_expression1269 = frozenset([1, 66, 70, 71])
+ FOLLOW_62_in_cast_expression1282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_name_in_cast_expression1284 = frozenset([63])
+ FOLLOW_63_in_cast_expression1286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_cast_expression1288 = frozenset([1])
+ FOLLOW_unary_expression_in_cast_expression1293 = frozenset([1])
+ FOLLOW_postfix_expression_in_unary_expression1304 = frozenset([1])
+ FOLLOW_72_in_unary_expression1309 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_unary_expression1311 = frozenset([1])
+ FOLLOW_73_in_unary_expression1316 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_unary_expression1318 = frozenset([1])
+ FOLLOW_unary_operator_in_unary_expression1323 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_unary_expression1325 = frozenset([1])
+ FOLLOW_74_in_unary_expression1330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_unary_expression1332 = frozenset([1])
+ FOLLOW_74_in_unary_expression1337 = frozenset([62])
+ FOLLOW_62_in_unary_expression1339 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_name_in_unary_expression1341 = frozenset([63])
+ FOLLOW_63_in_unary_expression1343 = frozenset([1])
+ FOLLOW_primary_expression_in_postfix_expression1367 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_64_in_postfix_expression1383 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_postfix_expression1385 = frozenset([65])
+ FOLLOW_65_in_postfix_expression1387 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_62_in_postfix_expression1401 = frozenset([63])
+ FOLLOW_63_in_postfix_expression1405 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_62_in_postfix_expression1420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_argument_expression_list_in_postfix_expression1424 = frozenset([63])
+ FOLLOW_63_in_postfix_expression1428 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_62_in_postfix_expression1444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_macro_parameter_list_in_postfix_expression1446 = frozenset([63])
+ FOLLOW_63_in_postfix_expression1448 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_75_in_postfix_expression1462 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_postfix_expression1466 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_66_in_postfix_expression1482 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_postfix_expression1486 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_76_in_postfix_expression1502 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_postfix_expression1506 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_72_in_postfix_expression1522 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_73_in_postfix_expression1536 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_parameter_declaration_in_macro_parameter_list1559 = frozenset([1, 27])
+ FOLLOW_27_in_macro_parameter_list1562 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_declaration_in_macro_parameter_list1564 = frozenset([1, 27])
+ FOLLOW_set_in_unary_operator0 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_primary_expression1613 = frozenset([1])
+ FOLLOW_constant_in_primary_expression1618 = frozenset([1])
+ FOLLOW_62_in_primary_expression1623 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_primary_expression1625 = frozenset([63])
+ FOLLOW_63_in_primary_expression1627 = frozenset([1])
+ FOLLOW_HEX_LITERAL_in_constant1643 = frozenset([1])
+ FOLLOW_OCTAL_LITERAL_in_constant1653 = frozenset([1])
+ FOLLOW_DECIMAL_LITERAL_in_constant1663 = frozenset([1])
+ FOLLOW_CHARACTER_LITERAL_in_constant1671 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_constant1680 = frozenset([4, 9])
+ FOLLOW_STRING_LITERAL_in_constant1683 = frozenset([1, 4, 9])
+ FOLLOW_IDENTIFIER_in_constant1688 = frozenset([1, 4])
+ FOLLOW_FLOATING_POINT_LITERAL_in_constant1699 = frozenset([1])
+ FOLLOW_assignment_expression_in_expression1715 = frozenset([1, 27])
+ FOLLOW_27_in_expression1718 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_expression1720 = frozenset([1, 27])
+ FOLLOW_conditional_expression_in_constant_expression1733 = frozenset([1])
+ FOLLOW_lvalue_in_assignment_expression1744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
+ FOLLOW_assignment_operator_in_assignment_expression1746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_assignment_expression1748 = frozenset([1])
+ FOLLOW_conditional_expression_in_assignment_expression1753 = frozenset([1])
+ FOLLOW_unary_expression_in_lvalue1765 = frozenset([1])
+ FOLLOW_set_in_assignment_operator0 = frozenset([1])
+ FOLLOW_logical_or_expression_in_conditional_expression1839 = frozenset([1, 90])
+ FOLLOW_90_in_conditional_expression1842 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_conditional_expression1844 = frozenset([47])
+ FOLLOW_47_in_conditional_expression1846 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_conditional_expression_in_conditional_expression1848 = frozenset([1])
+ FOLLOW_logical_and_expression_in_logical_or_expression1863 = frozenset([1, 91])
+ FOLLOW_91_in_logical_or_expression1866 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_logical_and_expression_in_logical_or_expression1868 = frozenset([1, 91])
+ FOLLOW_inclusive_or_expression_in_logical_and_expression1881 = frozenset([1, 92])
+ FOLLOW_92_in_logical_and_expression1884 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_inclusive_or_expression_in_logical_and_expression1886 = frozenset([1, 92])
+ FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899 = frozenset([1, 93])
+ FOLLOW_93_in_inclusive_or_expression1902 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904 = frozenset([1, 93])
+ FOLLOW_and_expression_in_exclusive_or_expression1917 = frozenset([1, 94])
+ FOLLOW_94_in_exclusive_or_expression1920 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_and_expression_in_exclusive_or_expression1922 = frozenset([1, 94])
+ FOLLOW_equality_expression_in_and_expression1935 = frozenset([1, 77])
+ FOLLOW_77_in_and_expression1938 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_equality_expression_in_and_expression1940 = frozenset([1, 77])
+ FOLLOW_relational_expression_in_equality_expression1952 = frozenset([1, 95, 96])
+ FOLLOW_set_in_equality_expression1955 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_relational_expression_in_equality_expression1961 = frozenset([1, 95, 96])
+ FOLLOW_shift_expression_in_relational_expression1975 = frozenset([1, 97, 98, 99, 100])
+ FOLLOW_set_in_relational_expression1978 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_shift_expression_in_relational_expression1988 = frozenset([1, 97, 98, 99, 100])
+ FOLLOW_additive_expression_in_shift_expression2001 = frozenset([1, 101, 102])
+ FOLLOW_set_in_shift_expression2004 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_additive_expression_in_shift_expression2010 = frozenset([1, 101, 102])
+ FOLLOW_labeled_statement_in_statement2025 = frozenset([1])
+ FOLLOW_compound_statement_in_statement2030 = frozenset([1])
+ FOLLOW_expression_statement_in_statement2035 = frozenset([1])
+ FOLLOW_selection_statement_in_statement2040 = frozenset([1])
+ FOLLOW_iteration_statement_in_statement2045 = frozenset([1])
+ FOLLOW_jump_statement_in_statement2050 = frozenset([1])
+ FOLLOW_macro_statement_in_statement2055 = frozenset([1])
+ FOLLOW_asm2_statement_in_statement2060 = frozenset([1])
+ FOLLOW_asm1_statement_in_statement2065 = frozenset([1])
+ FOLLOW_asm_statement_in_statement2070 = frozenset([1])
+ FOLLOW_declaration_in_statement2075 = frozenset([1])
+ FOLLOW_103_in_asm2_statement2086 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_asm2_statement2089 = frozenset([62])
+ FOLLOW_62_in_asm2_statement2091 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_set_in_asm2_statement2094 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_63_in_asm2_statement2101 = frozenset([25])
+ FOLLOW_25_in_asm2_statement2103 = frozenset([1])
+ FOLLOW_104_in_asm1_statement2115 = frozenset([43])
+ FOLLOW_43_in_asm1_statement2117 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_set_in_asm1_statement2120 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_44_in_asm1_statement2127 = frozenset([1])
+ FOLLOW_105_in_asm_statement2138 = frozenset([43])
+ FOLLOW_43_in_asm_statement2140 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_set_in_asm_statement2143 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_44_in_asm_statement2150 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_macro_statement2162 = frozenset([62])
+ FOLLOW_62_in_macro_statement2164 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_declaration_in_macro_statement2166 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_list_in_macro_statement2170 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_macro_statement2173 = frozenset([63])
+ FOLLOW_63_in_macro_statement2176 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_labeled_statement2188 = frozenset([47])
+ FOLLOW_47_in_labeled_statement2190 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_labeled_statement2192 = frozenset([1])
+ FOLLOW_106_in_labeled_statement2197 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_labeled_statement2199 = frozenset([47])
+ FOLLOW_47_in_labeled_statement2201 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_labeled_statement2203 = frozenset([1])
+ FOLLOW_107_in_labeled_statement2208 = frozenset([47])
+ FOLLOW_47_in_labeled_statement2210 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_labeled_statement2212 = frozenset([1])
+ FOLLOW_43_in_compound_statement2223 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_declaration_in_compound_statement2225 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_list_in_compound_statement2228 = frozenset([44])
+ FOLLOW_44_in_compound_statement2231 = frozenset([1])
+ FOLLOW_statement_in_statement_list2242 = frozenset([1, 4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_25_in_expression_statement2254 = frozenset([1])
+ FOLLOW_expression_in_expression_statement2259 = frozenset([25])
+ FOLLOW_25_in_expression_statement2261 = frozenset([1])
+ FOLLOW_108_in_selection_statement2272 = frozenset([62])
+ FOLLOW_62_in_selection_statement2274 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_selection_statement2278 = frozenset([63])
+ FOLLOW_63_in_selection_statement2280 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_selection_statement2284 = frozenset([1, 109])
+ FOLLOW_109_in_selection_statement2299 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_selection_statement2301 = frozenset([1])
+ FOLLOW_110_in_selection_statement2308 = frozenset([62])
+ FOLLOW_62_in_selection_statement2310 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_selection_statement2312 = frozenset([63])
+ FOLLOW_63_in_selection_statement2314 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_selection_statement2316 = frozenset([1])
+ FOLLOW_111_in_iteration_statement2327 = frozenset([62])
+ FOLLOW_62_in_iteration_statement2329 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_iteration_statement2333 = frozenset([63])
+ FOLLOW_63_in_iteration_statement2335 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_iteration_statement2337 = frozenset([1])
+ FOLLOW_112_in_iteration_statement2344 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_iteration_statement2346 = frozenset([111])
+ FOLLOW_111_in_iteration_statement2348 = frozenset([62])
+ FOLLOW_62_in_iteration_statement2350 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_iteration_statement2354 = frozenset([63])
+ FOLLOW_63_in_iteration_statement2356 = frozenset([25])
+ FOLLOW_25_in_iteration_statement2358 = frozenset([1])
+ FOLLOW_113_in_iteration_statement2365 = frozenset([62])
+ FOLLOW_62_in_iteration_statement2367 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_statement_in_iteration_statement2369 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_statement_in_iteration_statement2373 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_iteration_statement2375 = frozenset([63])
+ FOLLOW_63_in_iteration_statement2378 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_iteration_statement2380 = frozenset([1])
+ FOLLOW_114_in_jump_statement2393 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_jump_statement2395 = frozenset([25])
+ FOLLOW_25_in_jump_statement2397 = frozenset([1])
+ FOLLOW_115_in_jump_statement2402 = frozenset([25])
+ FOLLOW_25_in_jump_statement2404 = frozenset([1])
+ FOLLOW_116_in_jump_statement2409 = frozenset([25])
+ FOLLOW_25_in_jump_statement2411 = frozenset([1])
+ FOLLOW_117_in_jump_statement2416 = frozenset([25])
+ FOLLOW_25_in_jump_statement2418 = frozenset([1])
+ FOLLOW_117_in_jump_statement2423 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_jump_statement2425 = frozenset([25])
+ FOLLOW_25_in_jump_statement2427 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred2100 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred4100 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_declarator_in_synpred4103 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_declaration_in_synpred4105 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_43_in_synpred4108 = frozenset([1])
+ FOLLOW_declaration_in_synpred5118 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred7157 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred10207 = frozenset([1])
+ FOLLOW_type_specifier_in_synpred14272 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred15286 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred33444 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_synpred34442 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_type_qualifier_in_synpred34444 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_declarator_in_synpred34447 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred39566 = frozenset([1])
+ FOLLOW_type_specifier_in_synpred40570 = frozenset([1])
+ FOLLOW_pointer_in_synpred66784 = frozenset([4, 58, 59, 60, 62])
+ FOLLOW_58_in_synpred66788 = frozenset([4, 59, 60, 62])
+ FOLLOW_59_in_synpred66793 = frozenset([4, 60, 62])
+ FOLLOW_60_in_synpred66798 = frozenset([4, 62])
+ FOLLOW_direct_declarator_in_synpred66802 = frozenset([1])
+ FOLLOW_declarator_suffix_in_synpred67821 = frozenset([1])
+ FOLLOW_58_in_synpred69830 = frozenset([1])
+ FOLLOW_declarator_suffix_in_synpred70838 = frozenset([1])
+ FOLLOW_62_in_synpred73878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_type_list_in_synpred73880 = frozenset([63])
+ FOLLOW_63_in_synpred73882 = frozenset([1])
+ FOLLOW_62_in_synpred74892 = frozenset([4])
+ FOLLOW_identifier_list_in_synpred74894 = frozenset([63])
+ FOLLOW_63_in_synpred74896 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred75921 = frozenset([1])
+ FOLLOW_pointer_in_synpred76924 = frozenset([1])
+ FOLLOW_66_in_synpred77919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_qualifier_in_synpred77921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_pointer_in_synpred77924 = frozenset([1])
+ FOLLOW_66_in_synpred78930 = frozenset([66])
+ FOLLOW_pointer_in_synpred78932 = frozenset([1])
+ FOLLOW_53_in_synpred81977 = frozenset([1])
+ FOLLOW_27_in_synpred82974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_53_in_synpred82977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_declaration_in_synpred82981 = frozenset([1])
+ FOLLOW_declarator_in_synpred83997 = frozenset([1])
+ FOLLOW_abstract_declarator_in_synpred84999 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred86994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_declarator_in_synpred86997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_synpred86999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_53_in_synpred861004 = frozenset([1])
+ FOLLOW_specifier_qualifier_list_in_synpred901046 = frozenset([1, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_synpred901048 = frozenset([1])
+ FOLLOW_direct_abstract_declarator_in_synpred911067 = frozenset([1])
+ FOLLOW_62_in_synpred931086 = frozenset([62, 64, 66])
+ FOLLOW_abstract_declarator_in_synpred931088 = frozenset([63])
+ FOLLOW_63_in_synpred931090 = frozenset([1])
+ FOLLOW_abstract_declarator_suffix_in_synpred941098 = frozenset([1])
+ FOLLOW_62_in_synpred1091282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_name_in_synpred1091284 = frozenset([63])
+ FOLLOW_63_in_synpred1091286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_synpred1091288 = frozenset([1])
+ FOLLOW_74_in_synpred1141330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_synpred1141332 = frozenset([1])
+ FOLLOW_62_in_synpred1171420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_argument_expression_list_in_synpred1171424 = frozenset([63])
+ FOLLOW_63_in_synpred1171428 = frozenset([1])
+ FOLLOW_62_in_synpred1181444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_macro_parameter_list_in_synpred1181446 = frozenset([63])
+ FOLLOW_63_in_synpred1181448 = frozenset([1])
+ FOLLOW_66_in_synpred1201482 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_synpred1201486 = frozenset([1])
+ FOLLOW_STRING_LITERAL_in_synpred1371683 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_synpred1381680 = frozenset([4, 9])
+ FOLLOW_STRING_LITERAL_in_synpred1381683 = frozenset([1, 9])
+ FOLLOW_lvalue_in_synpred1421744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
+ FOLLOW_assignment_operator_in_synpred1421746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_synpred1421748 = frozenset([1])
+ FOLLOW_expression_statement_in_synpred1692035 = frozenset([1])
+ FOLLOW_macro_statement_in_synpred1732055 = frozenset([1])
+ FOLLOW_asm2_statement_in_synpred1742060 = frozenset([1])
+ FOLLOW_declaration_in_synpred1812166 = frozenset([1])
+ FOLLOW_statement_list_in_synpred1822170 = frozenset([1])
+ FOLLOW_declaration_in_synpred1862225 = frozenset([1])
+ FOLLOW_statement_in_synpred1882242 = frozenset([1])
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser3/__init__.py
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/C.g4 b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/C.g4
new file mode 100644
index 00000000..ffc5c121
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/C.g4
@@ -0,0 +1,631 @@
+/* @file
+ This file is used as the grammar file of the ECC tool.
+
+ Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+*/
+
+
+grammar C;
+options {
+ language=Python;
+}
+
+
+@header {
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+#   java org.antlr.v4.Tool C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at:
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+}
+
+@members {
+
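+# The helper methods below are invoked from embedded actions in the grammar
+# rules. Each one wraps the matched source text and its (line, column) span
+# in a CodeFragment object and appends it to the matching FileProfile list,
+# which the ECC checkers consume after parsing.
+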
+def printTokenInfo(self, line, offset, tokenText):
+ print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
+
+def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
+}
+
+translation_unit
+ : external_declaration*
+ ;
+
+
+external_declaration
+ : ( declaration_specifiers? declarator declaration* '{' )
+ | function_definition
+ | declaration
+ | macro_statement (';')?
+ ;
+
+function_definition
+locals [String ModifierText = '', String DeclText = '', int LBLine = 0, int LBOffset = 0, int DeclLine = 0, int DeclOffset = 0]
+@init {
+ModifierText = '';
+DeclText = '';
+LBLine = 0;
+LBOffset = 0;
+DeclLine = 0;
+DeclOffset = 0;
+}
+@after{
+self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
+}
+ : d=declaration_specifiers? declarator
+ ( declaration+ a=compound_statement // K&R style
+ | b=compound_statement // ANSI style
+ ) {
+if localctx.d != None:
+ ModifierText = $declaration_specifiers.text
+else:
+ ModifierText = ''
+DeclText = $declarator.text
+DeclLine = $declarator.start.line
+DeclOffset = $declarator.start.column
+if localctx.a != None:
+ LBLine = $a.start.line
+ LBOffset = $a.start.column
+else:
+ LBLine = $b.start.line
+ LBOffset = $b.start.column
+ }
+ ;
+
+
+declaration_specifiers
+ : ( storage_class_specifier
+ | type_specifier
+ | type_qualifier
+ )+
+ ;
+
+declaration
+ : a='typedef' b=declaration_specifiers? c=init_declarator_list d=';'
+ {
+if localctx.b is not None:
+ self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, $d.line, localctx.d.column, $b.text, $c.text)
+else:
+ self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, $d.line, localctx.d.column, '', $c.text)
+ }
+ | s=declaration_specifiers t=init_declarator_list? e=';'
+ {
+if localctx.t is not None:
+ self.StoreVariableDeclaration($s.start.line, $s.start.column, $t.start.line, $t.start.column, $s.text, $t.text)
+}
+ ;
+
+init_declarator_list
+ : init_declarator (',' init_declarator)*
+ ;
+
+init_declarator
+ : declarator ('=' initializer)?
+ ;
+
+storage_class_specifier
+ : 'extern'
+ | 'static'
+ | 'auto'
+ | 'register'
+ | 'STATIC'
+ ;
+
+type_specifier
+ : 'void'
+ | 'char'
+ | 'short'
+ | 'int'
+ | 'long'
+ | 'float'
+ | 'double'
+ | 'signed'
+ | 'unsigned'
+ | s=struct_or_union_specifier
+ {
+if localctx.s.stop is not None:
+ self.StoreStructUnionDefinition($s.start.line, $s.start.column, $s.stop.line, $s.stop.column, $s.text)
+}
+ | e=enum_specifier
+ {
+if localctx.e.stop is not None:
+ self.StoreEnumerationDefinition($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)
+}
+ | (IDENTIFIER type_qualifier* declarator)
+ | type_id
+ ;
+
+type_id
+ : IDENTIFIER
+ //{self.printTokenInfo($a.line, $a.pos, $a.text)}
+ ;
+
+struct_or_union_specifier
+ : struct_or_union IDENTIFIER? '{' struct_declaration_list '}'
+ | struct_or_union IDENTIFIER
+ ;
+
+struct_or_union
+ : 'struct'
+ | 'union'
+ ;
+
+struct_declaration_list
+ : struct_declaration+
+ ;
+
+struct_declaration
+ : specifier_qualifier_list struct_declarator_list ';'
+ ;
+
+specifier_qualifier_list
+ : ( type_qualifier | type_specifier )+
+ ;
+
+struct_declarator_list
+ : struct_declarator (',' struct_declarator)*
+ ;
+
+struct_declarator
+ : declarator (':' constant_expression)?
+ | ':' constant_expression
+ ;
+
+enum_specifier
+ : 'enum' '{' enumerator_list ','? '}'
+ | 'enum' IDENTIFIER '{' enumerator_list ','? '}'
+ | 'enum' IDENTIFIER
+ ;
+
+enumerator_list
+ : enumerator (',' enumerator)*
+ ;
+
+enumerator
+ : IDENTIFIER ('=' constant_expression)?
+ ;
+
+type_qualifier
+ : 'const'
+ | 'volatile'
+ | 'IN'
+ | 'OUT'
+ | 'OPTIONAL'
+ | 'CONST'
+ | 'UNALIGNED'
+ | 'VOLATILE'
+ | 'GLOBAL_REMOVE_IF_UNREFERENCED'
+ | 'EFIAPI'
+ | 'EFI_BOOTSERVICE'
+ | 'EFI_RUNTIMESERVICE'
+ | 'PACKED'
+ ;
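+// Note: alongside the standard C qualifiers, type_qualifier above also
+// accepts the EDK2/UEFI annotation macros (IN, OUT, OPTIONAL, EFIAPI, ...)
+// so that firmware sources can be parsed without first expanding those
+// macros away.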
+
+declarator
+ : pointer? ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? direct_declarator
+// | ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? pointer? direct_declarator
+ | pointer
+ ;
+
+direct_declarator
+ : IDENTIFIER declarator_suffix*
+ | '(' ('EFIAPI')? declarator ')' declarator_suffix+
+ ;
+
+declarator_suffix
+ : '[' constant_expression ']'
+ | '[' ']'
+ | '(' parameter_type_list ')'
+ | '(' identifier_list ')'
+ | '(' ')'
+ ;
+
+pointer
+ : '*' type_qualifier+ pointer?
+ | '*' pointer
+ | '*'
+ ;
+
+parameter_type_list
+ : parameter_list (',' ('OPTIONAL')? '...')?
+ ;
+
+parameter_list
+ : parameter_declaration (',' ('OPTIONAL')? parameter_declaration)*
+ ;
+
+parameter_declaration
+ : declaration_specifiers (declarator|abstract_declarator)* ('OPTIONAL')?
+ // accommodates a user-defined type only; no declarator follows.
+ | pointer* IDENTIFIER
+ ;
+
+identifier_list
+ : IDENTIFIER
+ (',' IDENTIFIER)*
+ ;
+
+type_name
+ : specifier_qualifier_list abstract_declarator?
+ | type_id
+ ;
+
+abstract_declarator
+ : pointer direct_abstract_declarator?
+ | direct_abstract_declarator
+ ;
+
+direct_abstract_declarator
+ : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) abstract_declarator_suffix*
+ ;
+
+abstract_declarator_suffix
+ : '[' ']'
+ | '[' constant_expression ']'
+ | '(' ')'
+ | '(' parameter_type_list ')'
+ ;
+
+initializer
+ : assignment_expression
+ | '{' initializer_list ','? '}'
+ ;
+
+initializer_list
+ : initializer (',' initializer )*
+ ;
+
+// E x p r e s s i o n s
+
+argument_expression_list
+ : assignment_expression ('OPTIONAL')? (',' assignment_expression ('OPTIONAL')?)*
+ ;
+
+additive_expression
+ : (multiplicative_expression) ('+' multiplicative_expression | '-' multiplicative_expression)*
+ ;
+
+multiplicative_expression
+ : (cast_expression) ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
+ ;
+
+cast_expression
+ : '(' type_name ')' cast_expression
+ | unary_expression
+ ;
+
+unary_expression
+ : postfix_expression
+ | '++' unary_expression
+ | '--' unary_expression
+ | unary_operator cast_expression
+ | 'sizeof' unary_expression
+ | 'sizeof' '(' type_name ')'
+ ;
+
+postfix_expression
+locals [FuncCallText='']
+@init
+ {
+self.FuncCallText=''
+ }
+ : p=primary_expression {self.FuncCallText += $p.text}
+ ( '[' expression ']'
+ | '(' a=')'{self.StoreFunctionCalling($p.start.line, $p.start.column, $a.line, localctx.a.column, self.FuncCallText, '')}
+ | '(' c=argument_expression_list b=')' {self.StoreFunctionCalling($p.start.line, $p.start.column, $b.line, localctx.b.column, self.FuncCallText, $c.text)}
+ | '(' macro_parameter_list ')'
+ | '.' x=IDENTIFIER {self.FuncCallText += '.' + $x.text}
+ | '*' y=IDENTIFIER {self.FuncCallText = $y.text}
+ | '->' z=IDENTIFIER {self.FuncCallText += '->' + $z.text}
+ | '++'
+ | '--'
+ )*
+ ;
+
+macro_parameter_list
+ : parameter_declaration (',' parameter_declaration)*
+ ;
+
+unary_operator
+ : '&'
+ | '*'
+ | '+'
+ | '-'
+ | '~'
+ | '!'
+ ;
+
+primary_expression
+ : IDENTIFIER
+ | constant
+ | '(' expression ')'
+ ;
+
+constant
+ : HEX_LITERAL
+ | OCTAL_LITERAL
+ | DECIMAL_LITERAL
+ | CHARACTER_LITERAL
+ | (IDENTIFIER* STRING_LITERAL+)+ IDENTIFIER*
+ | FLOATING_POINT_LITERAL
+ ;
+
+/////
+
+expression
+ : assignment_expression (',' assignment_expression)*
+ ;
+
+constant_expression
+ : conditional_expression
+ ;
+
+assignment_expression
+ : lvalue assignment_operator assignment_expression
+ | conditional_expression
+ ;
+
+lvalue
+ : unary_expression
+ ;
+
+assignment_operator
+ : '='
+ | '*='
+ | '/='
+ | '%='
+ | '+='
+ | '-='
+ | '<<='
+ | '>>='
+ | '&='
+ | '^='
+ | '|='
+ ;
+
+conditional_expression
+ : e=logical_or_expression ('?' expression ':' conditional_expression {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)})?
+ ;
+
+logical_or_expression
+ : logical_and_expression ('||' logical_and_expression)*
+ ;
+
+logical_and_expression
+ : inclusive_or_expression ('&&' inclusive_or_expression)*
+ ;
+
+inclusive_or_expression
+ : exclusive_or_expression ('|' exclusive_or_expression)*
+ ;
+
+exclusive_or_expression
+ : and_expression ('^' and_expression)*
+ ;
+
+and_expression
+ : equality_expression ('&' equality_expression)*
+ ;
+
+equality_expression
+ : relational_expression (('=='|'!=') relational_expression )*
+ ;
+
+relational_expression
+ : shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
+ ;
+
+shift_expression
+ : additive_expression (('<<'|'>>') additive_expression)*
+ ;
+
+// S t a t e m e n t s
+
+statement
+ : labeled_statement
+ | compound_statement
+ | expression_statement
+ | selection_statement
+ | iteration_statement
+ | jump_statement
+ | macro_statement
+ | asm2_statement
+ | asm1_statement
+ | asm_statement
+ | declaration
+ ;
+
+asm2_statement
+ : '__asm__'? IDENTIFIER '(' (~(';'))* ')' ';'
+ ;
+
+asm1_statement
+ : '_asm' '{' (~('}'))* '}'
+ ;
+
+asm_statement
+ : '__asm' '{' (~('}'))* '}'
+ ;
+
+macro_statement
+ : IDENTIFIER '(' declaration* statement_list? expression? ')'
+ ;
+
+labeled_statement
+ : IDENTIFIER ':' statement
+ | 'case' constant_expression ':' statement
+ | 'default' ':' statement
+ ;
+
+compound_statement
+ : '{' declaration* statement_list? '}'
+ ;
+
+statement_list
+ : statement+
+ ;
+
+expression_statement
+ : ';'
+ | expression ';'
+ ;
+
+selection_statement
+ : 'if' '(' e=expression ')' {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)} statement (:'else' statement)?
+ | 'switch' '(' expression ')' statement
+ ;
+
+iteration_statement
+ : 'while' '(' e=expression ')' statement {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
+ | 'do' statement 'while' '(' e=expression ')' ';' {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
+ //| 'for' '(' expression_statement e=expression_statement expression? ')' statement {self.StorePredicateExpression($e.start.line, $e.start.column, $e.stop.line, $e.stop.column, $e.text)}
+ ;
+
+jump_statement
+ : 'goto' IDENTIFIER ';'
+ | 'continue' ';'
+ | 'break' ';'
+ | 'return' ';'
+ | 'return' expression ';'
+ ;
+
+IDENTIFIER
+ : LETTER (LETTER|'0'..'9')*
+ ;
+
+fragment
+LETTER
+ : '$'
+ | 'A'..'Z'
+ | 'a'..'z'
+ | '_'
+ ;
+
+CHARACTER_LITERAL
+ : ('L')? '\'' ( EscapeSequence | ~('\''|'\\') ) '\''
+ ;
+
+STRING_LITERAL
+ : ('L')? '"' ( EscapeSequence | ~('\\'|'"') )* '"'
+ ;
+
+HEX_LITERAL : '0' ('x'|'X') HexDigit+ IntegerTypeSuffix? ;
+
+DECIMAL_LITERAL : ('0' | '1'..'9' '0'..'9'*) IntegerTypeSuffix? ;
+
+OCTAL_LITERAL : '0' ('0'..'7')+ IntegerTypeSuffix? ;
+
+fragment
+HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;
+
+fragment
+IntegerTypeSuffix
+ : ('u'|'U')
+ | ('l'|'L')
+ | ('u'|'U') ('l'|'L')
+ | ('u'|'U') ('l'|'L') ('l'|'L')
+ ;
+
+FLOATING_POINT_LITERAL
+ : ('0'..'9')+ '.' ('0'..'9')* Exponent? FloatTypeSuffix?
+ | '.' ('0'..'9')+ Exponent? FloatTypeSuffix?
+ | ('0'..'9')+ Exponent FloatTypeSuffix?
+ | ('0'..'9')+ Exponent? FloatTypeSuffix
+ ;
+
+fragment
+Exponent : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;
+
+fragment
+FloatTypeSuffix : ('f'|'F'|'d'|'D') ;
+
+fragment
+EscapeSequence
+ : '\\' ('b'|'t'|'n'|'f'|'r'|'\''|'\\')
+ | OctalEscape
+ ;
+
+fragment
+OctalEscape
+ : '\\' ('0'..'3') ('0'..'7') ('0'..'7')
+ | '\\' ('0'..'7') ('0'..'7')
+ | '\\' ('0'..'7')
+ ;
+
+fragment
+UnicodeEscape
+ : '\\' 'u' HexDigit HexDigit HexDigit HexDigit
+ ;
+
+WS : (' '|'\r'|'\t'|'\u000C'|'\n')
+ -> channel(HIDDEN)
+ ;
+
+// ignore '\' of line continuation
+BS : ('\\')
+ -> channel(HIDDEN)
+ ;
+
+UnicodeVocabulary
+ : '\u0003'..'\uFFFE'
+ ;
+
+COMMENT
+ : '/*' .*? '*/'
+ -> channel(HIDDEN)
+ ;
+
+LINE_COMMENT
+ : '//' ~('\n'|'\r')* '\r'? '\n'
+ -> channel(HIDDEN)
+ ;
+
+// ignore #line info for now
+LINE_COMMAND
+ : '#' ~('\n'|'\r')* '\r'? '\n'
+ -> channel(HIDDEN)
+ ;
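
The grammar above embeds Python action blocks (StorePredicateExpression, StoreFunctionCalling, and the other Store* callbacks) that fire as the generated parser recognizes the corresponding constructs, so invoking the start rule is what populates the ECC code-fragment lists. A minimal driver sketch, assuming an installed antlr4-python3-runtime matching the generator version (4.7.1) and that the generated Ecc.CParser4 modules added by this patch are importable; the input file name is hypothetical:

    from antlr4 import FileStream, CommonTokenStream
    from Ecc.CParser4.CLexer import CLexer
    from Ecc.CParser4.CParser import CParser

    # Lex and parse one C source file; the grammar's embedded actions
    # (the Store* calls) run as a side effect of the parse itself.
    stream = FileStream('Sample.c')        # hypothetical input path
    lexer = CLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = CParser(tokens)
    tree = parser.translation_unit()       # start rule of this grammar
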
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CLexer.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CLexer.py
new file mode 100755
index 00000000..24d7bf13
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CLexer.py
@@ -0,0 +1,626 @@
+# Generated from C.g4 by ANTLR 4.7.1
+from antlr4 import *
+from io import StringIO
+from typing import TextIO
+import sys
+
+
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.v4.Tool -Dlanguage=Python3 C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2k")
+ buf.write("\u0383\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
+ buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
+ buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
+ buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
+ buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
+ buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
+ buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
+ buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
+ buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
+ buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
+ buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
+ buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
+ buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
+ buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
+ buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
+ buf.write("p\tp\4q\tq\4r\tr\3\2\3\2\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3")
+ buf.write("\4\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7")
+ buf.write("\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\n\3")
+ buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13")
+ buf.write("\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\16")
+ buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\20\3\20")
+ buf.write("\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22")
+ buf.write("\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23")
+ buf.write("\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25")
+ buf.write("\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27")
+ buf.write("\3\27\3\27\3\27\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32")
+ buf.write("\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33")
+ buf.write("\3\33\3\33\3\33\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\36")
+ buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37")
+ buf.write("\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3!\3")
+ buf.write("!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3")
+ buf.write("\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"")
+ buf.write("\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#")
+ buf.write("\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3")
+ buf.write("%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3")
+ buf.write("&\3&\3&\3&\3&\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3")
+ buf.write(",\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\61")
+ buf.write("\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64")
+ buf.write("\3\64\3\65\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39\3")
+ buf.write("9\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3>\3?\3")
+ buf.write("?\3?\3?\3@\3@\3@\3A\3A\3A\3B\3B\3B\3C\3C\3D\3D\3D\3E\3")
+ buf.write("E\3E\3F\3F\3G\3G\3H\3H\3H\3I\3I\3I\3J\3J\3K\3K\3L\3L\3")
+ buf.write("L\3M\3M\3M\3N\3N\3N\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3")
+ buf.write("Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3T\3T\3")
+ buf.write("T\3T\3T\3T\3T\3T\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3")
+ buf.write("W\3W\3W\3X\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3")
+ buf.write("[\3[\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3")
+ buf.write("]\3]\3]\3]\3]\3^\3^\3^\7^\u02b2\n^\f^\16^\u02b5\13^\3")
+ buf.write("_\3_\3`\5`\u02ba\n`\3`\3`\3`\5`\u02bf\n`\3`\3`\3a\5a\u02c4")
+ buf.write("\na\3a\3a\3a\7a\u02c9\na\fa\16a\u02cc\13a\3a\3a\3b\3b")
+ buf.write("\3b\6b\u02d3\nb\rb\16b\u02d4\3b\5b\u02d8\nb\3c\3c\3c\7")
+ buf.write("c\u02dd\nc\fc\16c\u02e0\13c\5c\u02e2\nc\3c\5c\u02e5\n")
+ buf.write("c\3d\3d\6d\u02e9\nd\rd\16d\u02ea\3d\5d\u02ee\nd\3e\3e")
+ buf.write("\3f\3f\3f\3f\3f\3f\5f\u02f8\nf\3g\6g\u02fb\ng\rg\16g\u02fc")
+ buf.write("\3g\3g\7g\u0301\ng\fg\16g\u0304\13g\3g\5g\u0307\ng\3g")
+ buf.write("\5g\u030a\ng\3g\3g\6g\u030e\ng\rg\16g\u030f\3g\5g\u0313")
+ buf.write("\ng\3g\5g\u0316\ng\3g\6g\u0319\ng\rg\16g\u031a\3g\3g\5")
+ buf.write("g\u031f\ng\3g\6g\u0322\ng\rg\16g\u0323\3g\5g\u0327\ng")
+ buf.write("\3g\5g\u032a\ng\3h\3h\5h\u032e\nh\3h\6h\u0331\nh\rh\16")
+ buf.write("h\u0332\3i\3i\3j\3j\3j\5j\u033a\nj\3k\3k\3k\3k\3k\3k\3")
+ buf.write("k\3k\3k\5k\u0345\nk\3l\3l\3l\3l\3l\3l\3l\3m\3m\3m\3m\3")
+ buf.write("n\3n\3n\3n\3o\3o\3p\3p\3p\3p\7p\u035c\np\fp\16p\u035f")
+ buf.write("\13p\3p\3p\3p\3p\3p\3q\3q\3q\3q\7q\u036a\nq\fq\16q\u036d")
+ buf.write("\13q\3q\5q\u0370\nq\3q\3q\3q\3q\3r\3r\7r\u0378\nr\fr\16")
+ buf.write("r\u037b\13r\3r\5r\u037e\nr\3r\3r\3r\3r\3\u035d\2s\3\3")
+ buf.write("\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16")
+ buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61")
+ buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*")
+ buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w")
+ buf.write("=y>{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008b")
+ buf.write("G\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009b")
+ buf.write("O\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab")
+ buf.write("W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb")
+ buf.write("_\u00bd\2\u00bf`\u00c1a\u00c3b\u00c5c\u00c7d\u00c9\2\u00cb")
+ buf.write("\2\u00cde\u00cf\2\u00d1\2\u00d3\2\u00d5\2\u00d7\2\u00d9")
+ buf.write("f\u00dbg\u00ddh\u00dfi\u00e1j\u00e3k\3\2\20\6\2&&C\\a")
+ buf.write("ac|\4\2))^^\4\2$$^^\4\2ZZzz\5\2\62;CHch\6\2NNWWnnww\4")
+ buf.write("\2WWww\4\2NNnn\4\2GGgg\4\2--//\6\2FFHHffhh\t\2))^^ddh")
+ buf.write("hppttvv\5\2\13\f\16\17\"\"\4\2\f\f\17\17\2\u03a2\2\3\3")
+ buf.write("\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2")
+ buf.write("\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2")
+ buf.write("\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2")
+ buf.write("\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2")
+ buf.write("\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3")
+ buf.write("\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2")
+ buf.write("\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3")
+ buf.write("\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K")
+ buf.write("\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2")
+ buf.write("U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2")
+ buf.write("\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2")
+ buf.write("\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2")
+ buf.write("\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3")
+ buf.write("\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083")
+ buf.write("\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2")
+ buf.write("\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091")
+ buf.write("\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2")
+ buf.write("\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f")
+ buf.write("\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2")
+ buf.write("\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad")
+ buf.write("\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2")
+ buf.write("\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb")
+ buf.write("\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2")
+ buf.write("\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00cd\3\2\2\2\2\u00d9")
+ buf.write("\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2")
+ buf.write("\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\3\u00e5\3\2\2\2\5\u00e7")
+ buf.write("\3\2\2\2\7\u00e9\3\2\2\2\t\u00f1\3\2\2\2\13\u00f3\3\2")
+ buf.write("\2\2\r\u00f5\3\2\2\2\17\u00fc\3\2\2\2\21\u0103\3\2\2\2")
+ buf.write("\23\u0108\3\2\2\2\25\u0111\3\2\2\2\27\u0118\3\2\2\2\31")
+ buf.write("\u011d\3\2\2\2\33\u0122\3\2\2\2\35\u0128\3\2\2\2\37\u012c")
+ buf.write("\3\2\2\2!\u0131\3\2\2\2#\u0137\3\2\2\2%\u013e\3\2\2\2")
+ buf.write("\'\u0145\3\2\2\2)\u014e\3\2\2\2+\u0150\3\2\2\2-\u0157")
+ buf.write("\3\2\2\2/\u015d\3\2\2\2\61\u015f\3\2\2\2\63\u0164\3\2")
+ buf.write("\2\2\65\u016a\3\2\2\2\67\u0173\3\2\2\29\u0176\3\2\2\2")
+ buf.write(";\u017a\3\2\2\2=\u0183\3\2\2\2?\u0189\3\2\2\2A\u0193\3")
+ buf.write("\2\2\2C\u019c\3\2\2\2E\u01ba\3\2\2\2G\u01c1\3\2\2\2I\u01d1")
+ buf.write("\3\2\2\2K\u01e4\3\2\2\2M\u01eb\3\2\2\2O\u01ed\3\2\2\2")
+ buf.write("Q\u01ef\3\2\2\2S\u01f1\3\2\2\2U\u01f3\3\2\2\2W\u01f5\3")
+ buf.write("\2\2\2Y\u01f9\3\2\2\2[\u01fb\3\2\2\2]\u01fd\3\2\2\2_\u01ff")
+ buf.write("\3\2\2\2a\u0201\3\2\2\2c\u0204\3\2\2\2e\u0207\3\2\2\2")
+ buf.write("g\u020e\3\2\2\2i\u0210\3\2\2\2k\u0213\3\2\2\2m\u0215\3")
+ buf.write("\2\2\2o\u0217\3\2\2\2q\u0219\3\2\2\2s\u021c\3\2\2\2u\u021f")
+ buf.write("\3\2\2\2w\u0222\3\2\2\2y\u0225\3\2\2\2{\u0228\3\2\2\2")
+ buf.write("}\u022c\3\2\2\2\177\u0230\3\2\2\2\u0081\u0233\3\2\2\2")
+ buf.write("\u0083\u0236\3\2\2\2\u0085\u0239\3\2\2\2\u0087\u023b\3")
+ buf.write("\2\2\2\u0089\u023e\3\2\2\2\u008b\u0241\3\2\2\2\u008d\u0243")
+ buf.write("\3\2\2\2\u008f\u0245\3\2\2\2\u0091\u0248\3\2\2\2\u0093")
+ buf.write("\u024b\3\2\2\2\u0095\u024d\3\2\2\2\u0097\u024f\3\2\2\2")
+ buf.write("\u0099\u0252\3\2\2\2\u009b\u0255\3\2\2\2\u009d\u0258\3")
+ buf.write("\2\2\2\u009f\u025b\3\2\2\2\u00a1\u0263\3\2\2\2\u00a3\u0268")
+ buf.write("\3\2\2\2\u00a5\u026e\3\2\2\2\u00a7\u0273\3\2\2\2\u00a9")
+ buf.write("\u027b\3\2\2\2\u00ab\u027e\3\2\2\2\u00ad\u0283\3\2\2\2")
+ buf.write("\u00af\u028a\3\2\2\2\u00b1\u0290\3\2\2\2\u00b3\u0293\3")
+ buf.write("\2\2\2\u00b5\u0298\3\2\2\2\u00b7\u02a1\3\2\2\2\u00b9\u02a7")
+ buf.write("\3\2\2\2\u00bb\u02ae\3\2\2\2\u00bd\u02b6\3\2\2\2\u00bf")
+ buf.write("\u02b9\3\2\2\2\u00c1\u02c3\3\2\2\2\u00c3\u02cf\3\2\2\2")
+ buf.write("\u00c5\u02e1\3\2\2\2\u00c7\u02e6\3\2\2\2\u00c9\u02ef\3")
+ buf.write("\2\2\2\u00cb\u02f7\3\2\2\2\u00cd\u0329\3\2\2\2\u00cf\u032b")
+ buf.write("\3\2\2\2\u00d1\u0334\3\2\2\2\u00d3\u0339\3\2\2\2\u00d5")
+ buf.write("\u0344\3\2\2\2\u00d7\u0346\3\2\2\2\u00d9\u034d\3\2\2\2")
+ buf.write("\u00db\u0351\3\2\2\2\u00dd\u0355\3\2\2\2\u00df\u0357\3")
+ buf.write("\2\2\2\u00e1\u0365\3\2\2\2\u00e3\u0375\3\2\2\2\u00e5\u00e6")
+ buf.write("\7}\2\2\u00e6\4\3\2\2\2\u00e7\u00e8\7=\2\2\u00e8\6\3\2")
+ buf.write("\2\2\u00e9\u00ea\7v\2\2\u00ea\u00eb\7{\2\2\u00eb\u00ec")
+ buf.write("\7r\2\2\u00ec\u00ed\7g\2\2\u00ed\u00ee\7f\2\2\u00ee\u00ef")
+ buf.write("\7g\2\2\u00ef\u00f0\7h\2\2\u00f0\b\3\2\2\2\u00f1\u00f2")
+ buf.write("\7.\2\2\u00f2\n\3\2\2\2\u00f3\u00f4\7?\2\2\u00f4\f\3\2")
+ buf.write("\2\2\u00f5\u00f6\7g\2\2\u00f6\u00f7\7z\2\2\u00f7\u00f8")
+ buf.write("\7v\2\2\u00f8\u00f9\7g\2\2\u00f9\u00fa\7t\2\2\u00fa\u00fb")
+ buf.write("\7p\2\2\u00fb\16\3\2\2\2\u00fc\u00fd\7u\2\2\u00fd\u00fe")
+ buf.write("\7v\2\2\u00fe\u00ff\7c\2\2\u00ff\u0100\7v\2\2\u0100\u0101")
+ buf.write("\7k\2\2\u0101\u0102\7e\2\2\u0102\20\3\2\2\2\u0103\u0104")
+ buf.write("\7c\2\2\u0104\u0105\7w\2\2\u0105\u0106\7v\2\2\u0106\u0107")
+ buf.write("\7q\2\2\u0107\22\3\2\2\2\u0108\u0109\7t\2\2\u0109\u010a")
+ buf.write("\7g\2\2\u010a\u010b\7i\2\2\u010b\u010c\7k\2\2\u010c\u010d")
+ buf.write("\7u\2\2\u010d\u010e\7v\2\2\u010e\u010f\7g\2\2\u010f\u0110")
+ buf.write("\7t\2\2\u0110\24\3\2\2\2\u0111\u0112\7U\2\2\u0112\u0113")
+ buf.write("\7V\2\2\u0113\u0114\7C\2\2\u0114\u0115\7V\2\2\u0115\u0116")
+ buf.write("\7K\2\2\u0116\u0117\7E\2\2\u0117\26\3\2\2\2\u0118\u0119")
+ buf.write("\7x\2\2\u0119\u011a\7q\2\2\u011a\u011b\7k\2\2\u011b\u011c")
+ buf.write("\7f\2\2\u011c\30\3\2\2\2\u011d\u011e\7e\2\2\u011e\u011f")
+ buf.write("\7j\2\2\u011f\u0120\7c\2\2\u0120\u0121\7t\2\2\u0121\32")
+ buf.write("\3\2\2\2\u0122\u0123\7u\2\2\u0123\u0124\7j\2\2\u0124\u0125")
+ buf.write("\7q\2\2\u0125\u0126\7t\2\2\u0126\u0127\7v\2\2\u0127\34")
+ buf.write("\3\2\2\2\u0128\u0129\7k\2\2\u0129\u012a\7p\2\2\u012a\u012b")
+ buf.write("\7v\2\2\u012b\36\3\2\2\2\u012c\u012d\7n\2\2\u012d\u012e")
+ buf.write("\7q\2\2\u012e\u012f\7p\2\2\u012f\u0130\7i\2\2\u0130 \3")
+ buf.write("\2\2\2\u0131\u0132\7h\2\2\u0132\u0133\7n\2\2\u0133\u0134")
+ buf.write("\7q\2\2\u0134\u0135\7c\2\2\u0135\u0136\7v\2\2\u0136\"")
+ buf.write("\3\2\2\2\u0137\u0138\7f\2\2\u0138\u0139\7q\2\2\u0139\u013a")
+ buf.write("\7w\2\2\u013a\u013b\7d\2\2\u013b\u013c\7n\2\2\u013c\u013d")
+ buf.write("\7g\2\2\u013d$\3\2\2\2\u013e\u013f\7u\2\2\u013f\u0140")
+ buf.write("\7k\2\2\u0140\u0141\7i\2\2\u0141\u0142\7p\2\2\u0142\u0143")
+ buf.write("\7g\2\2\u0143\u0144\7f\2\2\u0144&\3\2\2\2\u0145\u0146")
+ buf.write("\7w\2\2\u0146\u0147\7p\2\2\u0147\u0148\7u\2\2\u0148\u0149")
+ buf.write("\7k\2\2\u0149\u014a\7i\2\2\u014a\u014b\7p\2\2\u014b\u014c")
+ buf.write("\7g\2\2\u014c\u014d\7f\2\2\u014d(\3\2\2\2\u014e\u014f")
+ buf.write("\7\177\2\2\u014f*\3\2\2\2\u0150\u0151\7u\2\2\u0151\u0152")
+ buf.write("\7v\2\2\u0152\u0153\7t\2\2\u0153\u0154\7w\2\2\u0154\u0155")
+ buf.write("\7e\2\2\u0155\u0156\7v\2\2\u0156,\3\2\2\2\u0157\u0158")
+ buf.write("\7w\2\2\u0158\u0159\7p\2\2\u0159\u015a\7k\2\2\u015a\u015b")
+ buf.write("\7q\2\2\u015b\u015c\7p\2\2\u015c.\3\2\2\2\u015d\u015e")
+ buf.write("\7<\2\2\u015e\60\3\2\2\2\u015f\u0160\7g\2\2\u0160\u0161")
+ buf.write("\7p\2\2\u0161\u0162\7w\2\2\u0162\u0163\7o\2\2\u0163\62")
+ buf.write("\3\2\2\2\u0164\u0165\7e\2\2\u0165\u0166\7q\2\2\u0166\u0167")
+ buf.write("\7p\2\2\u0167\u0168\7u\2\2\u0168\u0169\7v\2\2\u0169\64")
+ buf.write("\3\2\2\2\u016a\u016b\7x\2\2\u016b\u016c\7q\2\2\u016c\u016d")
+ buf.write("\7n\2\2\u016d\u016e\7c\2\2\u016e\u016f\7v\2\2\u016f\u0170")
+ buf.write("\7k\2\2\u0170\u0171\7n\2\2\u0171\u0172\7g\2\2\u0172\66")
+ buf.write("\3\2\2\2\u0173\u0174\7K\2\2\u0174\u0175\7P\2\2\u01758")
+ buf.write("\3\2\2\2\u0176\u0177\7Q\2\2\u0177\u0178\7W\2\2\u0178\u0179")
+ buf.write("\7V\2\2\u0179:\3\2\2\2\u017a\u017b\7Q\2\2\u017b\u017c")
+ buf.write("\7R\2\2\u017c\u017d\7V\2\2\u017d\u017e\7K\2\2\u017e\u017f")
+ buf.write("\7Q\2\2\u017f\u0180\7P\2\2\u0180\u0181\7C\2\2\u0181\u0182")
+ buf.write("\7N\2\2\u0182<\3\2\2\2\u0183\u0184\7E\2\2\u0184\u0185")
+ buf.write("\7Q\2\2\u0185\u0186\7P\2\2\u0186\u0187\7U\2\2\u0187\u0188")
+ buf.write("\7V\2\2\u0188>\3\2\2\2\u0189\u018a\7W\2\2\u018a\u018b")
+ buf.write("\7P\2\2\u018b\u018c\7C\2\2\u018c\u018d\7N\2\2\u018d\u018e")
+ buf.write("\7K\2\2\u018e\u018f\7I\2\2\u018f\u0190\7P\2\2\u0190\u0191")
+ buf.write("\7G\2\2\u0191\u0192\7F\2\2\u0192@\3\2\2\2\u0193\u0194")
+ buf.write("\7X\2\2\u0194\u0195\7Q\2\2\u0195\u0196\7N\2\2\u0196\u0197")
+ buf.write("\7C\2\2\u0197\u0198\7V\2\2\u0198\u0199\7K\2\2\u0199\u019a")
+ buf.write("\7N\2\2\u019a\u019b\7G\2\2\u019bB\3\2\2\2\u019c\u019d")
+ buf.write("\7I\2\2\u019d\u019e\7N\2\2\u019e\u019f\7Q\2\2\u019f\u01a0")
+ buf.write("\7D\2\2\u01a0\u01a1\7C\2\2\u01a1\u01a2\7N\2\2\u01a2\u01a3")
+ buf.write("\7a\2\2\u01a3\u01a4\7T\2\2\u01a4\u01a5\7G\2\2\u01a5\u01a6")
+ buf.write("\7O\2\2\u01a6\u01a7\7Q\2\2\u01a7\u01a8\7X\2\2\u01a8\u01a9")
+ buf.write("\7G\2\2\u01a9\u01aa\7a\2\2\u01aa\u01ab\7K\2\2\u01ab\u01ac")
+ buf.write("\7H\2\2\u01ac\u01ad\7a\2\2\u01ad\u01ae\7W\2\2\u01ae\u01af")
+ buf.write("\7P\2\2\u01af\u01b0\7T\2\2\u01b0\u01b1\7G\2\2\u01b1\u01b2")
+ buf.write("\7H\2\2\u01b2\u01b3\7G\2\2\u01b3\u01b4\7T\2\2\u01b4\u01b5")
+ buf.write("\7G\2\2\u01b5\u01b6\7P\2\2\u01b6\u01b7\7E\2\2\u01b7\u01b8")
+ buf.write("\7G\2\2\u01b8\u01b9\7F\2\2\u01b9D\3\2\2\2\u01ba\u01bb")
+ buf.write("\7G\2\2\u01bb\u01bc\7H\2\2\u01bc\u01bd\7K\2\2\u01bd\u01be")
+ buf.write("\7C\2\2\u01be\u01bf\7R\2\2\u01bf\u01c0\7K\2\2\u01c0F\3")
+ buf.write("\2\2\2\u01c1\u01c2\7G\2\2\u01c2\u01c3\7H\2\2\u01c3\u01c4")
+ buf.write("\7K\2\2\u01c4\u01c5\7a\2\2\u01c5\u01c6\7D\2\2\u01c6\u01c7")
+ buf.write("\7Q\2\2\u01c7\u01c8\7Q\2\2\u01c8\u01c9\7V\2\2\u01c9\u01ca")
+ buf.write("\7U\2\2\u01ca\u01cb\7G\2\2\u01cb\u01cc\7T\2\2\u01cc\u01cd")
+ buf.write("\7X\2\2\u01cd\u01ce\7K\2\2\u01ce\u01cf\7E\2\2\u01cf\u01d0")
+ buf.write("\7G\2\2\u01d0H\3\2\2\2\u01d1\u01d2\7G\2\2\u01d2\u01d3")
+ buf.write("\7H\2\2\u01d3\u01d4\7K\2\2\u01d4\u01d5\7a\2\2\u01d5\u01d6")
+ buf.write("\7T\2\2\u01d6\u01d7\7W\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9")
+ buf.write("\7V\2\2\u01d9\u01da\7K\2\2\u01da\u01db\7O\2\2\u01db\u01dc")
+ buf.write("\7G\2\2\u01dc\u01dd\7U\2\2\u01dd\u01de\7G\2\2\u01de\u01df")
+ buf.write("\7T\2\2\u01df\u01e0\7X\2\2\u01e0\u01e1\7K\2\2\u01e1\u01e2")
+ buf.write("\7E\2\2\u01e2\u01e3\7G\2\2\u01e3J\3\2\2\2\u01e4\u01e5")
+ buf.write("\7R\2\2\u01e5\u01e6\7C\2\2\u01e6\u01e7\7E\2\2\u01e7\u01e8")
+ buf.write("\7M\2\2\u01e8\u01e9\7G\2\2\u01e9\u01ea\7F\2\2\u01eaL\3")
+ buf.write("\2\2\2\u01eb\u01ec\7*\2\2\u01ecN\3\2\2\2\u01ed\u01ee\7")
+ buf.write("+\2\2\u01eeP\3\2\2\2\u01ef\u01f0\7]\2\2\u01f0R\3\2\2\2")
+ buf.write("\u01f1\u01f2\7_\2\2\u01f2T\3\2\2\2\u01f3\u01f4\7,\2\2")
+ buf.write("\u01f4V\3\2\2\2\u01f5\u01f6\7\60\2\2\u01f6\u01f7\7\60")
+ buf.write("\2\2\u01f7\u01f8\7\60\2\2\u01f8X\3\2\2\2\u01f9\u01fa\7")
+ buf.write("-\2\2\u01faZ\3\2\2\2\u01fb\u01fc\7/\2\2\u01fc\\\3\2\2")
+ buf.write("\2\u01fd\u01fe\7\61\2\2\u01fe^\3\2\2\2\u01ff\u0200\7\'")
+ buf.write("\2\2\u0200`\3\2\2\2\u0201\u0202\7-\2\2\u0202\u0203\7-")
+ buf.write("\2\2\u0203b\3\2\2\2\u0204\u0205\7/\2\2\u0205\u0206\7/")
+ buf.write("\2\2\u0206d\3\2\2\2\u0207\u0208\7u\2\2\u0208\u0209\7k")
+ buf.write("\2\2\u0209\u020a\7|\2\2\u020a\u020b\7g\2\2\u020b\u020c")
+ buf.write("\7q\2\2\u020c\u020d\7h\2\2\u020df\3\2\2\2\u020e\u020f")
+ buf.write("\7\60\2\2\u020fh\3\2\2\2\u0210\u0211\7/\2\2\u0211\u0212")
+ buf.write("\7@\2\2\u0212j\3\2\2\2\u0213\u0214\7(\2\2\u0214l\3\2\2")
+ buf.write("\2\u0215\u0216\7\u0080\2\2\u0216n\3\2\2\2\u0217\u0218")
+ buf.write("\7#\2\2\u0218p\3\2\2\2\u0219\u021a\7,\2\2\u021a\u021b")
+ buf.write("\7?\2\2\u021br\3\2\2\2\u021c\u021d\7\61\2\2\u021d\u021e")
+ buf.write("\7?\2\2\u021et\3\2\2\2\u021f\u0220\7\'\2\2\u0220\u0221")
+ buf.write("\7?\2\2\u0221v\3\2\2\2\u0222\u0223\7-\2\2\u0223\u0224")
+ buf.write("\7?\2\2\u0224x\3\2\2\2\u0225\u0226\7/\2\2\u0226\u0227")
+ buf.write("\7?\2\2\u0227z\3\2\2\2\u0228\u0229\7>\2\2\u0229\u022a")
+ buf.write("\7>\2\2\u022a\u022b\7?\2\2\u022b|\3\2\2\2\u022c\u022d")
+ buf.write("\7@\2\2\u022d\u022e\7@\2\2\u022e\u022f\7?\2\2\u022f~\3")
+ buf.write("\2\2\2\u0230\u0231\7(\2\2\u0231\u0232\7?\2\2\u0232\u0080")
+ buf.write("\3\2\2\2\u0233\u0234\7`\2\2\u0234\u0235\7?\2\2\u0235\u0082")
+ buf.write("\3\2\2\2\u0236\u0237\7~\2\2\u0237\u0238\7?\2\2\u0238\u0084")
+ buf.write("\3\2\2\2\u0239\u023a\7A\2\2\u023a\u0086\3\2\2\2\u023b")
+ buf.write("\u023c\7~\2\2\u023c\u023d\7~\2\2\u023d\u0088\3\2\2\2\u023e")
+ buf.write("\u023f\7(\2\2\u023f\u0240\7(\2\2\u0240\u008a\3\2\2\2\u0241")
+ buf.write("\u0242\7~\2\2\u0242\u008c\3\2\2\2\u0243\u0244\7`\2\2\u0244")
+ buf.write("\u008e\3\2\2\2\u0245\u0246\7?\2\2\u0246\u0247\7?\2\2\u0247")
+ buf.write("\u0090\3\2\2\2\u0248\u0249\7#\2\2\u0249\u024a\7?\2\2\u024a")
+ buf.write("\u0092\3\2\2\2\u024b\u024c\7>\2\2\u024c\u0094\3\2\2\2")
+ buf.write("\u024d\u024e\7@\2\2\u024e\u0096\3\2\2\2\u024f\u0250\7")
+ buf.write(">\2\2\u0250\u0251\7?\2\2\u0251\u0098\3\2\2\2\u0252\u0253")
+ buf.write("\7@\2\2\u0253\u0254\7?\2\2\u0254\u009a\3\2\2\2\u0255\u0256")
+ buf.write("\7>\2\2\u0256\u0257\7>\2\2\u0257\u009c\3\2\2\2\u0258\u0259")
+ buf.write("\7@\2\2\u0259\u025a\7@\2\2\u025a\u009e\3\2\2\2\u025b\u025c")
+ buf.write("\7a\2\2\u025c\u025d\7a\2\2\u025d\u025e\7c\2\2\u025e\u025f")
+ buf.write("\7u\2\2\u025f\u0260\7o\2\2\u0260\u0261\7a\2\2\u0261\u0262")
+ buf.write("\7a\2\2\u0262\u00a0\3\2\2\2\u0263\u0264\7a\2\2\u0264\u0265")
+ buf.write("\7c\2\2\u0265\u0266\7u\2\2\u0266\u0267\7o\2\2\u0267\u00a2")
+ buf.write("\3\2\2\2\u0268\u0269\7a\2\2\u0269\u026a\7a\2\2\u026a\u026b")
+ buf.write("\7c\2\2\u026b\u026c\7u\2\2\u026c\u026d\7o\2\2\u026d\u00a4")
+ buf.write("\3\2\2\2\u026e\u026f\7e\2\2\u026f\u0270\7c\2\2\u0270\u0271")
+ buf.write("\7u\2\2\u0271\u0272\7g\2\2\u0272\u00a6\3\2\2\2\u0273\u0274")
+ buf.write("\7f\2\2\u0274\u0275\7g\2\2\u0275\u0276\7h\2\2\u0276\u0277")
+ buf.write("\7c\2\2\u0277\u0278\7w\2\2\u0278\u0279\7n\2\2\u0279\u027a")
+ buf.write("\7v\2\2\u027a\u00a8\3\2\2\2\u027b\u027c\7k\2\2\u027c\u027d")
+ buf.write("\7h\2\2\u027d\u00aa\3\2\2\2\u027e\u027f\7g\2\2\u027f\u0280")
+ buf.write("\7n\2\2\u0280\u0281\7u\2\2\u0281\u0282\7g\2\2\u0282\u00ac")
+ buf.write("\3\2\2\2\u0283\u0284\7u\2\2\u0284\u0285\7y\2\2\u0285\u0286")
+ buf.write("\7k\2\2\u0286\u0287\7v\2\2\u0287\u0288\7e\2\2\u0288\u0289")
+ buf.write("\7j\2\2\u0289\u00ae\3\2\2\2\u028a\u028b\7y\2\2\u028b\u028c")
+ buf.write("\7j\2\2\u028c\u028d\7k\2\2\u028d\u028e\7n\2\2\u028e\u028f")
+ buf.write("\7g\2\2\u028f\u00b0\3\2\2\2\u0290\u0291\7f\2\2\u0291\u0292")
+ buf.write("\7q\2\2\u0292\u00b2\3\2\2\2\u0293\u0294\7i\2\2\u0294\u0295")
+ buf.write("\7q\2\2\u0295\u0296\7v\2\2\u0296\u0297\7q\2\2\u0297\u00b4")
+ buf.write("\3\2\2\2\u0298\u0299\7e\2\2\u0299\u029a\7q\2\2\u029a\u029b")
+ buf.write("\7p\2\2\u029b\u029c\7v\2\2\u029c\u029d\7k\2\2\u029d\u029e")
+ buf.write("\7p\2\2\u029e\u029f\7w\2\2\u029f\u02a0\7g\2\2\u02a0\u00b6")
+ buf.write("\3\2\2\2\u02a1\u02a2\7d\2\2\u02a2\u02a3\7t\2\2\u02a3\u02a4")
+ buf.write("\7g\2\2\u02a4\u02a5\7c\2\2\u02a5\u02a6\7m\2\2\u02a6\u00b8")
+ buf.write("\3\2\2\2\u02a7\u02a8\7t\2\2\u02a8\u02a9\7g\2\2\u02a9\u02aa")
+ buf.write("\7v\2\2\u02aa\u02ab\7w\2\2\u02ab\u02ac\7t\2\2\u02ac\u02ad")
+ buf.write("\7p\2\2\u02ad\u00ba\3\2\2\2\u02ae\u02b3\5\u00bd_\2\u02af")
+ buf.write("\u02b2\5\u00bd_\2\u02b0\u02b2\4\62;\2\u02b1\u02af\3\2")
+ buf.write("\2\2\u02b1\u02b0\3\2\2\2\u02b2\u02b5\3\2\2\2\u02b3\u02b1")
+ buf.write("\3\2\2\2\u02b3\u02b4\3\2\2\2\u02b4\u00bc\3\2\2\2\u02b5")
+ buf.write("\u02b3\3\2\2\2\u02b6\u02b7\t\2\2\2\u02b7\u00be\3\2\2\2")
+ buf.write("\u02b8\u02ba\7N\2\2\u02b9\u02b8\3\2\2\2\u02b9\u02ba\3")
+ buf.write("\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02be\7)\2\2\u02bc\u02bf")
+ buf.write("\5\u00d3j\2\u02bd\u02bf\n\3\2\2\u02be\u02bc\3\2\2\2\u02be")
+ buf.write("\u02bd\3\2\2\2\u02bf\u02c0\3\2\2\2\u02c0\u02c1\7)\2\2")
+ buf.write("\u02c1\u00c0\3\2\2\2\u02c2\u02c4\7N\2\2\u02c3\u02c2\3")
+ buf.write("\2\2\2\u02c3\u02c4\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5\u02ca")
+ buf.write("\7$\2\2\u02c6\u02c9\5\u00d3j\2\u02c7\u02c9\n\4\2\2\u02c8")
+ buf.write("\u02c6\3\2\2\2\u02c8\u02c7\3\2\2\2\u02c9\u02cc\3\2\2\2")
+ buf.write("\u02ca\u02c8\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02cd\3")
+ buf.write("\2\2\2\u02cc\u02ca\3\2\2\2\u02cd\u02ce\7$\2\2\u02ce\u00c2")
+ buf.write("\3\2\2\2\u02cf\u02d0\7\62\2\2\u02d0\u02d2\t\5\2\2\u02d1")
+ buf.write("\u02d3\5\u00c9e\2\u02d2\u02d1\3\2\2\2\u02d3\u02d4\3\2")
+ buf.write("\2\2\u02d4\u02d2\3\2\2\2\u02d4\u02d5\3\2\2\2\u02d5\u02d7")
+ buf.write("\3\2\2\2\u02d6\u02d8\5\u00cbf\2\u02d7\u02d6\3\2\2\2\u02d7")
+ buf.write("\u02d8\3\2\2\2\u02d8\u00c4\3\2\2\2\u02d9\u02e2\7\62\2")
+ buf.write("\2\u02da\u02de\4\63;\2\u02db\u02dd\4\62;\2\u02dc\u02db")
+ buf.write("\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de")
+ buf.write("\u02df\3\2\2\2\u02df\u02e2\3\2\2\2\u02e0\u02de\3\2\2\2")
+ buf.write("\u02e1\u02d9\3\2\2\2\u02e1\u02da\3\2\2\2\u02e2\u02e4\3")
+ buf.write("\2\2\2\u02e3\u02e5\5\u00cbf\2\u02e4\u02e3\3\2\2\2\u02e4")
+ buf.write("\u02e5\3\2\2\2\u02e5\u00c6\3\2\2\2\u02e6\u02e8\7\62\2")
+ buf.write("\2\u02e7\u02e9\4\629\2\u02e8\u02e7\3\2\2\2\u02e9\u02ea")
+ buf.write("\3\2\2\2\u02ea\u02e8\3\2\2\2\u02ea\u02eb\3\2\2\2\u02eb")
+ buf.write("\u02ed\3\2\2\2\u02ec\u02ee\5\u00cbf\2\u02ed\u02ec\3\2")
+ buf.write("\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00c8\3\2\2\2\u02ef\u02f0")
+ buf.write("\t\6\2\2\u02f0\u00ca\3\2\2\2\u02f1\u02f8\t\7\2\2\u02f2")
+ buf.write("\u02f3\t\b\2\2\u02f3\u02f8\t\t\2\2\u02f4\u02f5\t\b\2\2")
+ buf.write("\u02f5\u02f6\t\t\2\2\u02f6\u02f8\t\t\2\2\u02f7\u02f1\3")
+ buf.write("\2\2\2\u02f7\u02f2\3\2\2\2\u02f7\u02f4\3\2\2\2\u02f8\u00cc")
+ buf.write("\3\2\2\2\u02f9\u02fb\4\62;\2\u02fa\u02f9\3\2\2\2\u02fb")
+ buf.write("\u02fc\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc\u02fd\3\2\2\2")
+ buf.write("\u02fd\u02fe\3\2\2\2\u02fe\u0302\7\60\2\2\u02ff\u0301")
+ buf.write("\4\62;\2\u0300\u02ff\3\2\2\2\u0301\u0304\3\2\2\2\u0302")
+ buf.write("\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0306\3\2\2\2")
+ buf.write("\u0304\u0302\3\2\2\2\u0305\u0307\5\u00cfh\2\u0306\u0305")
+ buf.write("\3\2\2\2\u0306\u0307\3\2\2\2\u0307\u0309\3\2\2\2\u0308")
+ buf.write("\u030a\5\u00d1i\2\u0309\u0308\3\2\2\2\u0309\u030a\3\2")
+ buf.write("\2\2\u030a\u032a\3\2\2\2\u030b\u030d\7\60\2\2\u030c\u030e")
+ buf.write("\4\62;\2\u030d\u030c\3\2\2\2\u030e\u030f\3\2\2\2\u030f")
+ buf.write("\u030d\3\2\2\2\u030f\u0310\3\2\2\2\u0310\u0312\3\2\2\2")
+ buf.write("\u0311\u0313\5\u00cfh\2\u0312\u0311\3\2\2\2\u0312\u0313")
+ buf.write("\3\2\2\2\u0313\u0315\3\2\2\2\u0314\u0316\5\u00d1i\2\u0315")
+ buf.write("\u0314\3\2\2\2\u0315\u0316\3\2\2\2\u0316\u032a\3\2\2\2")
+ buf.write("\u0317\u0319\4\62;\2\u0318\u0317\3\2\2\2\u0319\u031a\3")
+ buf.write("\2\2\2\u031a\u0318\3\2\2\2\u031a\u031b\3\2\2\2\u031b\u031c")
+ buf.write("\3\2\2\2\u031c\u031e\5\u00cfh\2\u031d\u031f\5\u00d1i\2")
+ buf.write("\u031e\u031d\3\2\2\2\u031e\u031f\3\2\2\2\u031f\u032a\3")
+ buf.write("\2\2\2\u0320\u0322\4\62;\2\u0321\u0320\3\2\2\2\u0322\u0323")
+ buf.write("\3\2\2\2\u0323\u0321\3\2\2\2\u0323\u0324\3\2\2\2\u0324")
+ buf.write("\u0326\3\2\2\2\u0325\u0327\5\u00cfh\2\u0326\u0325\3\2")
+ buf.write("\2\2\u0326\u0327\3\2\2\2\u0327\u0328\3\2\2\2\u0328\u032a")
+ buf.write("\5\u00d1i\2\u0329\u02fa\3\2\2\2\u0329\u030b\3\2\2\2\u0329")
+ buf.write("\u0318\3\2\2\2\u0329\u0321\3\2\2\2\u032a\u00ce\3\2\2\2")
+ buf.write("\u032b\u032d\t\n\2\2\u032c\u032e\t\13\2\2\u032d\u032c")
+ buf.write("\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0330\3\2\2\2\u032f")
+ buf.write("\u0331\4\62;\2\u0330\u032f\3\2\2\2\u0331\u0332\3\2\2\2")
+ buf.write("\u0332\u0330\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u00d0\3")
+ buf.write("\2\2\2\u0334\u0335\t\f\2\2\u0335\u00d2\3\2\2\2\u0336\u0337")
+ buf.write("\7^\2\2\u0337\u033a\t\r\2\2\u0338\u033a\5\u00d5k\2\u0339")
+ buf.write("\u0336\3\2\2\2\u0339\u0338\3\2\2\2\u033a\u00d4\3\2\2\2")
+ buf.write("\u033b\u033c\7^\2\2\u033c\u033d\4\62\65\2\u033d\u033e")
+ buf.write("\4\629\2\u033e\u0345\4\629\2\u033f\u0340\7^\2\2\u0340")
+ buf.write("\u0341\4\629\2\u0341\u0345\4\629\2\u0342\u0343\7^\2\2")
+ buf.write("\u0343\u0345\4\629\2\u0344\u033b\3\2\2\2\u0344\u033f\3")
+ buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0345\u00d6\3\2\2\2\u0346\u0347")
+ buf.write("\7^\2\2\u0347\u0348\7w\2\2\u0348\u0349\5\u00c9e\2\u0349")
+ buf.write("\u034a\5\u00c9e\2\u034a\u034b\5\u00c9e\2\u034b\u034c\5")
+ buf.write("\u00c9e\2\u034c\u00d8\3\2\2\2\u034d\u034e\t\16\2\2\u034e")
+ buf.write("\u034f\3\2\2\2\u034f\u0350\bm\2\2\u0350\u00da\3\2\2\2")
+ buf.write("\u0351\u0352\7^\2\2\u0352\u0353\3\2\2\2\u0353\u0354\b")
+ buf.write("n\2\2\u0354\u00dc\3\2\2\2\u0355\u0356\4\5\0\2\u0356\u00de")
+ buf.write("\3\2\2\2\u0357\u0358\7\61\2\2\u0358\u0359\7,\2\2\u0359")
+ buf.write("\u035d\3\2\2\2\u035a\u035c\13\2\2\2\u035b\u035a\3\2\2")
+ buf.write("\2\u035c\u035f\3\2\2\2\u035d\u035e\3\2\2\2\u035d\u035b")
+ buf.write("\3\2\2\2\u035e\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360")
+ buf.write("\u0361\7,\2\2\u0361\u0362\7\61\2\2\u0362\u0363\3\2\2\2")
+ buf.write("\u0363\u0364\bp\2\2\u0364\u00e0\3\2\2\2\u0365\u0366\7")
+ buf.write("\61\2\2\u0366\u0367\7\61\2\2\u0367\u036b\3\2\2\2\u0368")
+ buf.write("\u036a\n\17\2\2\u0369\u0368\3\2\2\2\u036a\u036d\3\2\2")
+ buf.write("\2\u036b\u0369\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036f")
+ buf.write("\3\2\2\2\u036d\u036b\3\2\2\2\u036e\u0370\7\17\2\2\u036f")
+ buf.write("\u036e\3\2\2\2\u036f\u0370\3\2\2\2\u0370\u0371\3\2\2\2")
+ buf.write("\u0371\u0372\7\f\2\2\u0372\u0373\3\2\2\2\u0373\u0374\b")
+ buf.write("q\2\2\u0374\u00e2\3\2\2\2\u0375\u0379\7%\2\2\u0376\u0378")
+ buf.write("\n\17\2\2\u0377\u0376\3\2\2\2\u0378\u037b\3\2\2\2\u0379")
+ buf.write("\u0377\3\2\2\2\u0379\u037a\3\2\2\2\u037a\u037d\3\2\2\2")
+ buf.write("\u037b\u0379\3\2\2\2\u037c\u037e\7\17\2\2\u037d\u037c")
+ buf.write("\3\2\2\2\u037d\u037e\3\2\2\2\u037e\u037f\3\2\2\2\u037f")
+ buf.write("\u0380\7\f\2\2\u0380\u0381\3\2\2\2\u0381\u0382\br\2\2")
+ buf.write("\u0382\u00e4\3\2\2\2\'\2\u02b1\u02b3\u02b9\u02be\u02c3")
+ buf.write("\u02c8\u02ca\u02d4\u02d7\u02de\u02e1\u02e4\u02ea\u02ed")
+ buf.write("\u02f7\u02fc\u0302\u0306\u0309\u030f\u0312\u0315\u031a")
+ buf.write("\u031e\u0323\u0326\u0329\u032d\u0332\u0339\u0344\u035d")
+ buf.write("\u036b\u036f\u0379\u037d\3\2\3\2")
+ return buf.getvalue()
+
+
+class CLexer(Lexer):
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ T__0 = 1
+ T__1 = 2
+ T__2 = 3
+ T__3 = 4
+ T__4 = 5
+ T__5 = 6
+ T__6 = 7
+ T__7 = 8
+ T__8 = 9
+ T__9 = 10
+ T__10 = 11
+ T__11 = 12
+ T__12 = 13
+ T__13 = 14
+ T__14 = 15
+ T__15 = 16
+ T__16 = 17
+ T__17 = 18
+ T__18 = 19
+ T__19 = 20
+ T__20 = 21
+ T__21 = 22
+ T__22 = 23
+ T__23 = 24
+ T__24 = 25
+ T__25 = 26
+ T__26 = 27
+ T__27 = 28
+ T__28 = 29
+ T__29 = 30
+ T__30 = 31
+ T__31 = 32
+ T__32 = 33
+ T__33 = 34
+ T__34 = 35
+ T__35 = 36
+ T__36 = 37
+ T__37 = 38
+ T__38 = 39
+ T__39 = 40
+ T__40 = 41
+ T__41 = 42
+ T__42 = 43
+ T__43 = 44
+ T__44 = 45
+ T__45 = 46
+ T__46 = 47
+ T__47 = 48
+ T__48 = 49
+ T__49 = 50
+ T__50 = 51
+ T__51 = 52
+ T__52 = 53
+ T__53 = 54
+ T__54 = 55
+ T__55 = 56
+ T__56 = 57
+ T__57 = 58
+ T__58 = 59
+ T__59 = 60
+ T__60 = 61
+ T__61 = 62
+ T__62 = 63
+ T__63 = 64
+ T__64 = 65
+ T__65 = 66
+ T__66 = 67
+ T__67 = 68
+ T__68 = 69
+ T__69 = 70
+ T__70 = 71
+ T__71 = 72
+ T__72 = 73
+ T__73 = 74
+ T__74 = 75
+ T__75 = 76
+ T__76 = 77
+ T__77 = 78
+ T__78 = 79
+ T__79 = 80
+ T__80 = 81
+ T__81 = 82
+ T__82 = 83
+ T__83 = 84
+ T__84 = 85
+ T__85 = 86
+ T__86 = 87
+ T__87 = 88
+ T__88 = 89
+ T__89 = 90
+ T__90 = 91
+ T__91 = 92
+ IDENTIFIER = 93
+ CHARACTER_LITERAL = 94
+ STRING_LITERAL = 95
+ HEX_LITERAL = 96
+ DECIMAL_LITERAL = 97
+ OCTAL_LITERAL = 98
+ FLOATING_POINT_LITERAL = 99
+ WS = 100
+ BS = 101
+ UnicodeVocabulary = 102
+ COMMENT = 103
+ LINE_COMMENT = 104
+ LINE_COMMAND = 105
+
+ channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
+
+ modeNames = [ "DEFAULT_MODE" ]
+
+ literalNames = [ "<INVALID>",
+ "'{'", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
+ "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'",
+ "'int'", "'long'", "'float'", "'double'", "'signed'", "'unsigned'",
+ "'}'", "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'",
+ "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'",
+ "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'", "'EFI_BOOTSERVICE'",
+ "'EFI_RUNTIMESERVICE'", "'PACKED'", "'('", "')'", "'['", "']'",
+ "'*'", "'...'", "'+'", "'-'", "'/'", "'%'", "'++'", "'--'",
+ "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='", "'/='",
+ "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'",
+ "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'",
+ "'__asm'", "'case'", "'default'", "'if'", "'else'", "'switch'",
+ "'while'", "'do'", "'goto'", "'continue'", "'break'", "'return'" ]
+
+ symbolicNames = [ "<INVALID>",
+ "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL", "HEX_LITERAL",
+ "DECIMAL_LITERAL", "OCTAL_LITERAL", "FLOATING_POINT_LITERAL",
+ "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND" ]
+
+ ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
+ "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
+ "T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
+ "T__20", "T__21", "T__22", "T__23", "T__24", "T__25",
+ "T__26", "T__27", "T__28", "T__29", "T__30", "T__31",
+ "T__32", "T__33", "T__34", "T__35", "T__36", "T__37",
+ "T__38", "T__39", "T__40", "T__41", "T__42", "T__43",
+ "T__44", "T__45", "T__46", "T__47", "T__48", "T__49",
+ "T__50", "T__51", "T__52", "T__53", "T__54", "T__55",
+ "T__56", "T__57", "T__58", "T__59", "T__60", "T__61",
+ "T__62", "T__63", "T__64", "T__65", "T__66", "T__67",
+ "T__68", "T__69", "T__70", "T__71", "T__72", "T__73",
+ "T__74", "T__75", "T__76", "T__77", "T__78", "T__79",
+ "T__80", "T__81", "T__82", "T__83", "T__84", "T__85",
+ "T__86", "T__87", "T__88", "T__89", "T__90", "T__91",
+ "IDENTIFIER", "LETTER", "CHARACTER_LITERAL", "STRING_LITERAL",
+ "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL", "HexDigit",
+ "IntegerTypeSuffix", "FLOATING_POINT_LITERAL", "Exponent",
+ "FloatTypeSuffix", "EscapeSequence", "OctalEscape", "UnicodeEscape",
+ "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND" ]
+
+ grammarFileName = "C.g4"
+
+ # @param output= sys.stdout Type: TextIO
+ def __init__(self,input=None,output= sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.1")
+ self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
+ self._actions = None
+ self._predicates = None
+
+
+
+ def printTokenInfo(self,line,offset,tokenText):
+ print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
+
+ def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+ def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+ def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+ def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+ def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+ def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+ def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
+
+
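
Each Store* helper above wraps its arguments in an Ecc.CodeFragment record and appends it to a module-level list in Ecc.FileProfile, which is how parse results reach the rest of the ECC checker. A short inspection sketch under stated assumptions: the Content, StartPos, and EndPos attribute names on the fragment objects are assumed, since only the constructor calls and list names appear in this patch:

    import Ecc.FileProfile as FileProfile

    # After a parse, dump every predicate expression that was recorded.
    # Content/StartPos/EndPos are assumed CodeFragment attribute names;
    # the list name comes from StorePredicateExpression above.
    for Pred in FileProfile.PredicateExpressionList:
        print(Pred.StartPos, Pred.EndPos, Pred.Content)
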
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CListener.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CListener.py
new file mode 100755
index 00000000..2facdc58
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CListener.py
@@ -0,0 +1,809 @@
+# Generated from C.g4 by ANTLR 4.7.1
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .CParser import CParser
+else:
+ from CParser import CParser
+
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.v4.Tool -Dlanguage=Python3 C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+
+
+# This class defines a complete listener for a parse tree produced by CParser.
+class CListener(ParseTreeListener):
+
+ # Enter a parse tree produced by CParser#translation_unit.
+ # @param ctx Type: CParser.Translation_unitContext
+ def enterTranslation_unit(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#translation_unit.
+ # @param ctx Type: CParser.Translation_unitContext
+ def exitTranslation_unit(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#external_declaration.
+ # @param ctx Type: CParser.External_declarationContext
+ def enterExternal_declaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#external_declaration.
+ # @param ctx Type: CParser.External_declarationContext
+ def exitExternal_declaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#function_definition.
+ # @param ctx Type: CParser.Function_definitionContext
+ def enterFunction_definition(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#function_definition.
+ # @param ctx Type: CParser.Function_definitionContext
+ def exitFunction_definition(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declaration_specifiers.
+ # @param ctx Type: CParser.Declaration_specifiersContext
+ def enterDeclaration_specifiers(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declaration_specifiers.
+ # @param ctx Type: CParser.Declaration_specifiersContext
+ def exitDeclaration_specifiers(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declaration.
+ # @param ctx Type: CParser.DeclarationContext
+ def enterDeclaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declaration.
+ # @param ctx Type: CParser.DeclarationContext
+ def exitDeclaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#init_declarator_list.
+ # @param ctx Type: CParser.Init_declarator_listContext
+ def enterInit_declarator_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#init_declarator_list.
+ # @param ctx Type: CParser.Init_declarator_listContext
+ def exitInit_declarator_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#init_declarator.
+ # @param ctx Type: CParser.Init_declaratorContext
+ def enterInit_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#init_declarator.
+ # @param ctx Type: CParser.Init_declaratorContext
+ def exitInit_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#storage_class_specifier.
+ # @param ctx Type: CParser.Storage_class_specifierContext
+ def enterStorage_class_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#storage_class_specifier.
+ # @param ctx Type: CParser.Storage_class_specifierContext
+ def exitStorage_class_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_specifier.
+ # @param ctx Type: CParser.Type_specifierContext
+ def enterType_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_specifier.
+ # @param ctx Type: CParser.Type_specifierContext
+ def exitType_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_id.
+ # @param ctx Type: CParser.Type_idContext
+ def enterType_id(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_id.
+ # @param ctx Type: CParser.Type_idContext
+ def exitType_id(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_or_union_specifier.
+ # @param ctx Type: CParser.Struct_or_union_specifierContext
+ def enterStruct_or_union_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_or_union_specifier.
+ # @param ctx Type: CParser.Struct_or_union_specifierContext
+ def exitStruct_or_union_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_or_union.
+ # @param ctx Type: CParser.Struct_or_unionContext
+ def enterStruct_or_union(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_or_union.
+ # @param ctx Type: CParser.Struct_or_unionContext
+ def exitStruct_or_union(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declaration_list.
+ # @param ctx Type: CParser.Struct_declaration_listContext
+ def enterStruct_declaration_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declaration_list.
+ # @param ctx Type: CParser.Struct_declaration_listContext
+ def exitStruct_declaration_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declaration.
+ # @param ctx Type: CParser.Struct_declarationContext
+ def enterStruct_declaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declaration.
+ # @param ctx Type: CParser.Struct_declarationContext
+ def exitStruct_declaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#specifier_qualifier_list.
+ # @param ctx Type: CParser.Specifier_qualifier_listContext
+ def enterSpecifier_qualifier_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#specifier_qualifier_list.
+ # @param ctx Type: CParser.Specifier_qualifier_listContext
+ def exitSpecifier_qualifier_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declarator_list.
+ # @param ctx Type: CParser.Struct_declarator_listContext
+ def enterStruct_declarator_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declarator_list.
+ # @param ctx Type: CParser.Struct_declarator_listContext
+ def exitStruct_declarator_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declarator.
+ # @param ctx Type: CParser.Struct_declaratorContext
+ def enterStruct_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declarator.
+ # @param ctx Type: CParser.Struct_declaratorContext
+ def exitStruct_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#enum_specifier.
+ # @param ctx Type: CParser.Enum_specifierContext
+ def enterEnum_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#enum_specifier.
+ # @param ctx Type: CParser.Enum_specifierContext
+ def exitEnum_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#enumerator_list.
+ # @param ctx Type: CParser.Enumerator_listContext
+ def enterEnumerator_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#enumerator_list.
+ # @param ctx Type: CParser.Enumerator_listContext
+ def exitEnumerator_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#enumerator.
+ # @param ctx Type: CParser.EnumeratorContext
+ def enterEnumerator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#enumerator.
+ # @param ctx Type: CParser.EnumeratorContext
+ def exitEnumerator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_qualifier.
+ # @param ctx Type: CParser.Type_qualifierContext
+ def enterType_qualifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_qualifier.
+ # @param ctx Type: CParser.Type_qualifierContext
+ def exitType_qualifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declarator.
+ # @param ctx Type: CParser.DeclaratorContext
+ def enterDeclarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declarator.
+ # @param ctx Type: CParser.DeclaratorContext
+ def exitDeclarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#direct_declarator.
+ # @param ctx Type: CParser.Direct_declaratorContext
+ def enterDirect_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#direct_declarator.
+ # @param ctx Type: CParser.Direct_declaratorContext
+ def exitDirect_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declarator_suffix.
+ # @param ctx Type: CParser.Declarator_suffixContext
+ def enterDeclarator_suffix(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declarator_suffix.
+ # @param ctx Type: CParser.Declarator_suffixContext
+ def exitDeclarator_suffix(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#pointer.
+ # @param ctx Type: CParser.PointerContext
+ def enterPointer(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#pointer.
+ # @param ctx Type: CParser.PointerContext
+ def exitPointer(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#parameter_type_list.
+ # @param ctx Type: CParser.Parameter_type_listContext
+ def enterParameter_type_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#parameter_type_list.
+ # @param ctx Type: CParser.Parameter_type_listContext
+ def exitParameter_type_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#parameter_list.
+ # @param ctx Type: CParser.Parameter_listContext
+ def enterParameter_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#parameter_list.
+ # @param ctx Type: CParser.Parameter_listContext
+ def exitParameter_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#parameter_declaration.
+ # @param ctx Type: CParser.Parameter_declarationContext
+ def enterParameter_declaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#parameter_declaration.
+ # @param ctx Type: CParser.Parameter_declarationContext
+ def exitParameter_declaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#identifier_list.
+ # @param ctx Type: CParser.Identifier_listContext
+ def enterIdentifier_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#identifier_list.
+ # @param ctx Type: CParser.Identifier_listContext
+ def exitIdentifier_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_name.
+ # @param ctx Type: CParser.Type_nameContext
+ def enterType_name(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_name.
+ # @param ctx Type: CParser.Type_nameContext
+ def exitType_name(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#abstract_declarator.
+ # @param ctx Type: CParser.Abstract_declaratorContext
+ def enterAbstract_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#abstract_declarator.
+ # @param ctx Type: CParser.Abstract_declaratorContext
+ def exitAbstract_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#direct_abstract_declarator.
+ # @param ctx Type: CParser.Direct_abstract_declaratorContext
+ def enterDirect_abstract_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#direct_abstract_declarator.
+ # @param ctx Type: CParser.Direct_abstract_declaratorContext
+ def exitDirect_abstract_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#abstract_declarator_suffix.
+ # @param ctx Type: CParser.Abstract_declarator_suffixContext
+ def enterAbstract_declarator_suffix(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#abstract_declarator_suffix.
+ # @param ctx Type: CParser.Abstract_declarator_suffixContext
+ def exitAbstract_declarator_suffix(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#initializer.
+ # @param ctx Type: CParser.InitializerContext
+ def enterInitializer(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#initializer.
+ # @param ctx Type: CParser.InitializerContext
+ def exitInitializer(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#initializer_list.
+ # @param ctx Type: CParser.Initializer_listContext
+ def enterInitializer_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#initializer_list.
+ # @param ctx Type: CParser.Initializer_listContext
+ def exitInitializer_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#argument_expression_list.
+ # @param ctx Type: CParser.Argument_expression_listContext
+ def enterArgument_expression_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#argument_expression_list.
+ # @param ctx Type: CParser.Argument_expression_listContext
+ def exitArgument_expression_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#additive_expression.
+ # @param ctx Type: CParser.Additive_expressionContext
+ def enterAdditive_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#additive_expression.
+ # @param ctx Type: CParser.Additive_expressionContext
+ def exitAdditive_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#multiplicative_expression.
+ # @param ctx Type: CParser.Multiplicative_expressionContext
+ def enterMultiplicative_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#multiplicative_expression.
+ # @param ctx Type: CParser.Multiplicative_expressionContext
+ def exitMultiplicative_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#cast_expression.
+ # @param ctx Type: CParser.Cast_expressionContext
+ def enterCast_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#cast_expression.
+ # @param ctx Type: CParser.Cast_expressionContext
+ def exitCast_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#unary_expression.
+ # @param ctx Type: CParser.Unary_expressionContext
+ def enterUnary_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#unary_expression.
+ # @param ctx Type: CParser.Unary_expressionContext
+ def exitUnary_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#postfix_expression.
+ # @param ctx Type: CParser.Postfix_expressionContext
+ def enterPostfix_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#postfix_expression.
+ # @param ctx Type: CParser.Postfix_expressionContext
+ def exitPostfix_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#macro_parameter_list.
+ # @param ctx Type: CParser.Macro_parameter_listContext
+ def enterMacro_parameter_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#macro_parameter_list.
+ # @param ctx Type: CParser.Macro_parameter_listContext
+ def exitMacro_parameter_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#unary_operator.
+ # @param ctx Type: CParser.Unary_operatorContext
+ def enterUnary_operator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#unary_operator.
+ # @param ctx Type: CParser.Unary_operatorContext
+ def exitUnary_operator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#primary_expression.
+ # @param ctx Type: CParser.Primary_expressionContext
+ def enterPrimary_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#primary_expression.
+ # @param ctx Type: CParser.Primary_expressionContext
+ def exitPrimary_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#constant.
+ # @param ctx Type: CParser.ConstantContext
+ def enterConstant(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#constant.
+ # @param ctx Type: CParser.ConstantContext
+ def exitConstant(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#expression.
+ # @param ctx Type: CParser.ExpressionContext
+ def enterExpression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#expression.
+ # @param ctx Type: CParser.ExpressionContext
+ def exitExpression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#constant_expression.
+ # @param ctx Type: CParser.Constant_expressionContext
+ def enterConstant_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#constant_expression.
+ # @param ctx Type: CParser.Constant_expressionContext
+ def exitConstant_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#assignment_expression.
+ # @param ctx Type: CParser.Assignment_expressionContext
+ def enterAssignment_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#assignment_expression.
+ # @param ctx Type: CParser.Assignment_expressionContext
+ def exitAssignment_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#lvalue.
+ # @param ctx Type: CParser.LvalueContext
+ def enterLvalue(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#lvalue.
+ # @param ctx Type: CParser.LvalueContext
+ def exitLvalue(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#assignment_operator.
+ # @param ctx Type: CParser.Assignment_operatorContext
+ def enterAssignment_operator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#assignment_operator.
+ # @param ctx Type: CParser.Assignment_operatorContext
+ def exitAssignment_operator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#conditional_expression.
+ # @param ctx Type: CParser.Conditional_expressionContext
+ def enterConditional_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#conditional_expression.
+ # @param ctx Type: CParser.Conditional_expressionContext
+ def exitConditional_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#logical_or_expression.
+ # @param ctx Type: CParser.Logical_or_expressionContext
+ def enterLogical_or_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#logical_or_expression.
+ # @param ctx Type: CParser.Logical_or_expressionContext
+ def exitLogical_or_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#logical_and_expression.
+ # @param ctx Type: CParser.Logical_and_expressionContext
+ def enterLogical_and_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#logical_and_expression.
+ # @param ctx Type: CParser.Logical_and_expressionContext
+ def exitLogical_and_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#inclusive_or_expression.
+ # @param ctx Type: CParser.Inclusive_or_expressionContext
+ def enterInclusive_or_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#inclusive_or_expression.
+ # @param ctx Type: CParser.Inclusive_or_expressionContext
+ def exitInclusive_or_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#exclusive_or_expression.
+ # @param ctx Type: CParser.Exclusive_or_expressionContext
+ def enterExclusive_or_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#exclusive_or_expression.
+ # @param ctx Type: CParser.Exclusive_or_expressionContext
+ def exitExclusive_or_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#and_expression.
+ # @param ctx Type: CParser.And_expressionContext
+ def enterAnd_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#and_expression.
+ # @param ctx Type: CParser.And_expressionContext
+ def exitAnd_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#equality_expression.
+ # @param ctx Type: CParser.Equality_expressionContext
+ def enterEquality_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#equality_expression.
+ # @param ctx Type: CParser.Equality_expressionContext
+ def exitEquality_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#relational_expression.
+ # @param ctx Type: CParser.Relational_expressionContext
+ def enterRelational_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#relational_expression.
+ # @param ctx Type: CParser.Relational_expressionContext
+ def exitRelational_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#shift_expression.
+ # @param ctx Type: CParser.Shift_expressionContext
+ def enterShift_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#shift_expression.
+ # @param ctx Type: CParser.Shift_expressionContext
+ def exitShift_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#statement.
+ # @param ctx Type: CParser.StatementContext
+ def enterStatement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#statement.
+ # @param ctx Type: CParser.StatementContext
+ def exitStatement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#asm2_statement.
+ # @param ctx Type: CParser.Asm2_statementContext
+ def enterAsm2_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#asm2_statement.
+ # @param ctx Type: CParser.Asm2_statementContext
+ def exitAsm2_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#asm1_statement.
+ # @param ctx Type: CParser.Asm1_statementContext
+ def enterAsm1_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#asm1_statement.
+ # @param ctx Type: CParser.Asm1_statementContext
+ def exitAsm1_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#asm_statement.
+ # @param ctx Type: CParser.Asm_statementContext
+ def enterAsm_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#asm_statement.
+ # @param ctx Type: CParser.Asm_statementContext
+ def exitAsm_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#macro_statement.
+ # @param ctx Type: CParser.Macro_statementContext
+ def enterMacro_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#macro_statement.
+ # @param ctx Type: CParser.Macro_statementContext
+ def exitMacro_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#labeled_statement.
+ # @param ctx Type: CParser.Labeled_statementContext
+ def enterLabeled_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#labeled_statement.
+ # @param ctx Type: CParser.Labeled_statementContext
+ def exitLabeled_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#compound_statement.
+ # @param ctx Type: CParser.Compound_statementContext
+ def enterCompound_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#compound_statement.
+ # @param ctx Type: CParser.Compound_statementContext
+ def exitCompound_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#statement_list.
+ # @param ctx Type: CParser.Statement_listContext
+ def enterStatement_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#statement_list.
+ # @param ctx Type: CParser.Statement_listContext
+ def exitStatement_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#expression_statement.
+ # @param ctx Type: CParser.Expression_statementContext
+ def enterExpression_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#expression_statement.
+ # @param ctx Type: CParser.Expression_statementContext
+ def exitExpression_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#selection_statement.
+ # @param ctx Type: CParser.Selection_statementContext
+ def enterSelection_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#selection_statement.
+ # @param ctx Type: CParser.Selection_statementContext
+ def exitSelection_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#iteration_statement.
+ # @param ctx Type: CParser.Iteration_statementContext
+ def enterIteration_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#iteration_statement.
+ # @param ctx Type: CParser.Iteration_statementContext
+ def exitIteration_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#jump_statement.
+ # @param ctx Type: CParser.Jump_statementContext
+ def enterJump_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#jump_statement.
+ # @param ctx Type: CParser.Jump_statementContext
+ def exitJump_statement(self,ctx):
+ pass
+
+
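+ # NOTE (illustrative, not part of the ANTLR-generated output): every
+ # enter/exit hook above is a no-op; tools subclass this listener and
+ # override only the callbacks they need. A minimal sketch, assuming this
+ # class is the CListener generated from the same C.g4 grammar:
+ #
+ #   from Ecc.CParser4.CListener import CListener
+ #
+ #   class FunctionCounter(CListener):
+ #       def __init__(self):
+ #           self.count = 0
+ #       # Fires once for each function_definition rule in the parse tree.
+ #       def enterFunction_definition(self, ctx):
+ #           self.count += 1
+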
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CParser.py
new file mode 100755
index 00000000..d8d55759
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/CParser.py
@@ -0,0 +1,6273 @@
+# Generated from C.g4 by ANTLR 4.7.1
+# encoding: utf-8
+from antlr4 import *
+from io import StringIO
+from typing import TextIO  # typing.io is deprecated (removed in Python 3.12); TextIO lives in typing
+import sys
+
+
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.v4.Tool C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3k")
+ buf.write("\u0380\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
+ buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
+ buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
+ buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
+ buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
+ buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
+ buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
+ buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
+ buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
+ buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
+ buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\3\2\7\2\u0092\n\2\f\2\16\2\u0095")
+ buf.write("\13\2\3\3\5\3\u0098\n\3\3\3\3\3\7\3\u009c\n\3\f\3\16\3")
+ buf.write("\u009f\13\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u00a7\n\3\5\3")
+ buf.write("\u00a9\n\3\3\4\5\4\u00ac\n\4\3\4\3\4\6\4\u00b0\n\4\r\4")
+ buf.write("\16\4\u00b1\3\4\3\4\3\4\5\4\u00b7\n\4\3\4\3\4\3\5\3\5")
+ buf.write("\3\5\6\5\u00be\n\5\r\5\16\5\u00bf\3\6\3\6\5\6\u00c4\n")
+ buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u00cc\n\6\3\6\3\6\3\6\5")
+ buf.write("\6\u00d1\n\6\3\7\3\7\3\7\7\7\u00d6\n\7\f\7\16\7\u00d9")
+ buf.write("\13\7\3\b\3\b\3\b\5\b\u00de\n\b\3\t\3\t\3\n\3\n\3\n\3")
+ buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n")
+ buf.write("\7\n\u00f3\n\n\f\n\16\n\u00f6\13\n\3\n\3\n\5\n\u00fa\n")
+ buf.write("\n\3\13\3\13\3\f\3\f\5\f\u0100\n\f\3\f\3\f\3\f\3\f\3\f")
+ buf.write("\3\f\3\f\5\f\u0109\n\f\3\r\3\r\3\16\6\16\u010e\n\16\r")
+ buf.write("\16\16\16\u010f\3\17\3\17\3\17\3\17\3\20\3\20\6\20\u0118")
+ buf.write("\n\20\r\20\16\20\u0119\3\21\3\21\3\21\7\21\u011f\n\21")
+ buf.write("\f\21\16\21\u0122\13\21\3\22\3\22\3\22\5\22\u0127\n\22")
+ buf.write("\3\22\3\22\5\22\u012b\n\22\3\23\3\23\3\23\3\23\5\23\u0131")
+ buf.write("\n\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u013a\n")
+ buf.write("\23\3\23\3\23\3\23\3\23\5\23\u0140\n\23\3\24\3\24\3\24")
+ buf.write("\7\24\u0145\n\24\f\24\16\24\u0148\13\24\3\25\3\25\3\25")
+ buf.write("\5\25\u014d\n\25\3\26\3\26\3\27\5\27\u0152\n\27\3\27\5")
+ buf.write("\27\u0155\n\27\3\27\5\27\u0158\n\27\3\27\5\27\u015b\n")
+ buf.write("\27\3\27\3\27\5\27\u015f\n\27\3\30\3\30\7\30\u0163\n\30")
+ buf.write("\f\30\16\30\u0166\13\30\3\30\3\30\5\30\u016a\n\30\3\30")
+ buf.write("\3\30\3\30\6\30\u016f\n\30\r\30\16\30\u0170\5\30\u0173")
+ buf.write("\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31")
+ buf.write("\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u0185\n\31\3\32\3")
+ buf.write("\32\6\32\u0189\n\32\r\32\16\32\u018a\3\32\5\32\u018e\n")
+ buf.write("\32\3\32\3\32\3\32\5\32\u0193\n\32\3\33\3\33\3\33\5\33")
+ buf.write("\u0198\n\33\3\33\5\33\u019b\n\33\3\34\3\34\3\34\5\34\u01a0")
+ buf.write("\n\34\3\34\7\34\u01a3\n\34\f\34\16\34\u01a6\13\34\3\35")
+ buf.write("\3\35\3\35\7\35\u01ab\n\35\f\35\16\35\u01ae\13\35\3\35")
+ buf.write("\5\35\u01b1\n\35\3\35\7\35\u01b4\n\35\f\35\16\35\u01b7")
+ buf.write("\13\35\3\35\5\35\u01ba\n\35\3\36\3\36\3\36\7\36\u01bf")
+ buf.write("\n\36\f\36\16\36\u01c2\13\36\3\37\3\37\5\37\u01c6\n\37")
+ buf.write("\3\37\5\37\u01c9\n\37\3 \3 \5 \u01cd\n \3 \5 \u01d0\n")
+ buf.write(" \3!\3!\3!\3!\3!\5!\u01d7\n!\3!\7!\u01da\n!\f!\16!\u01dd")
+ buf.write("\13!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5")
+ buf.write("\"\u01eb\n\"\3#\3#\3#\3#\5#\u01f1\n#\3#\3#\5#\u01f5\n")
+ buf.write("#\3$\3$\3$\7$\u01fa\n$\f$\16$\u01fd\13$\3%\3%\5%\u0201")
+ buf.write("\n%\3%\3%\3%\5%\u0206\n%\7%\u0208\n%\f%\16%\u020b\13%")
+ buf.write("\3&\3&\3&\3&\3&\7&\u0212\n&\f&\16&\u0215\13&\3\'\3\'\3")
+ buf.write("\'\3\'\3\'\3\'\3\'\7\'\u021e\n\'\f\'\16\'\u0221\13\'\3")
+ buf.write("(\3(\3(\3(\3(\3(\5(\u0229\n(\3)\3)\3)\3)\3)\3)\3)\3)\3")
+ buf.write(")\3)\3)\3)\3)\3)\3)\5)\u023a\n)\3*\3*\3*\3*\3*\3*\3*\3")
+ buf.write("*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3")
+ buf.write("*\3*\3*\3*\7*\u0259\n*\f*\16*\u025c\13*\3+\3+\3+\7+\u0261")
+ buf.write("\n+\f+\16+\u0264\13+\3,\3,\3-\3-\3-\3-\3-\3-\5-\u026e")
+ buf.write("\n-\3.\3.\3.\3.\3.\7.\u0275\n.\f.\16.\u0278\13.\3.\6.")
+ buf.write("\u027b\n.\r.\16.\u027c\6.\u027f\n.\r.\16.\u0280\3.\7.")
+ buf.write("\u0284\n.\f.\16.\u0287\13.\3.\5.\u028a\n.\3/\3/\3/\7/")
+ buf.write("\u028f\n/\f/\16/\u0292\13/\3\60\3\60\3\61\3\61\3\61\3")
+ buf.write("\61\3\61\5\61\u029b\n\61\3\62\3\62\3\63\3\63\3\64\3\64")
+ buf.write("\3\64\3\64\3\64\3\64\3\64\5\64\u02a8\n\64\3\65\3\65\3")
+ buf.write("\65\7\65\u02ad\n\65\f\65\16\65\u02b0\13\65\3\66\3\66\3")
+ buf.write("\66\7\66\u02b5\n\66\f\66\16\66\u02b8\13\66\3\67\3\67\3")
+ buf.write("\67\7\67\u02bd\n\67\f\67\16\67\u02c0\13\67\38\38\38\7")
+ buf.write("8\u02c5\n8\f8\168\u02c8\138\39\39\39\79\u02cd\n9\f9\16")
+ buf.write("9\u02d0\139\3:\3:\3:\7:\u02d5\n:\f:\16:\u02d8\13:\3;\3")
+ buf.write(";\3;\7;\u02dd\n;\f;\16;\u02e0\13;\3<\3<\3<\7<\u02e5\n")
+ buf.write("<\f<\16<\u02e8\13<\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\5")
+ buf.write("=\u02f5\n=\3>\5>\u02f8\n>\3>\3>\3>\7>\u02fd\n>\f>\16>")
+ buf.write("\u0300\13>\3>\3>\3>\3?\3?\3?\7?\u0308\n?\f?\16?\u030b")
+ buf.write("\13?\3?\3?\3@\3@\3@\7@\u0312\n@\f@\16@\u0315\13@\3@\3")
+ buf.write("@\3A\3A\3A\7A\u031c\nA\fA\16A\u031f\13A\3A\5A\u0322\n")
+ buf.write("A\3A\5A\u0325\nA\3A\3A\3B\3B\3B\3B\3B\3B\3B\3B\3B\3B\3")
+ buf.write("B\5B\u0334\nB\3C\3C\7C\u0338\nC\fC\16C\u033b\13C\3C\5")
+ buf.write("C\u033e\nC\3C\3C\3D\6D\u0343\nD\rD\16D\u0344\3E\3E\3E")
+ buf.write("\3E\5E\u034b\nE\3F\3F\3F\3F\3F\3F\3F\3F\5F\u0355\nF\3")
+ buf.write("F\3F\3F\3F\3F\3F\5F\u035d\nF\3G\3G\3G\3G\3G\3G\3G\3G\3")
+ buf.write("G\3G\3G\3G\3G\3G\3G\3G\5G\u036f\nG\3H\3H\3H\3H\3H\3H\3")
+ buf.write("H\3H\3H\3H\3H\3H\3H\5H\u037e\nH\3H\2\2I\2\4\6\b\n\f\16")
+ buf.write("\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDF")
+ buf.write("HJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
+ buf.write("\u0088\u008a\u008c\u008e\2\f\3\2\b\f\3\2\27\30\3\2\33")
+ buf.write("\'\5\2,,./\679\4\2\7\7:C\3\2IJ\3\2KN\3\2OP\3\2\4\4\3\2")
+ buf.write("\26\26\2\u03d8\2\u0093\3\2\2\2\4\u00a8\3\2\2\2\6\u00ab")
+ buf.write("\3\2\2\2\b\u00bd\3\2\2\2\n\u00d0\3\2\2\2\f\u00d2\3\2\2")
+ buf.write("\2\16\u00da\3\2\2\2\20\u00df\3\2\2\2\22\u00f9\3\2\2\2")
+ buf.write("\24\u00fb\3\2\2\2\26\u0108\3\2\2\2\30\u010a\3\2\2\2\32")
+ buf.write("\u010d\3\2\2\2\34\u0111\3\2\2\2\36\u0117\3\2\2\2 \u011b")
+ buf.write("\3\2\2\2\"\u012a\3\2\2\2$\u013f\3\2\2\2&\u0141\3\2\2\2")
+ buf.write("(\u0149\3\2\2\2*\u014e\3\2\2\2,\u015e\3\2\2\2.\u0172\3")
+ buf.write("\2\2\2\60\u0184\3\2\2\2\62\u0192\3\2\2\2\64\u0194\3\2")
+ buf.write("\2\2\66\u019c\3\2\2\28\u01b9\3\2\2\2:\u01bb\3\2\2\2<\u01c8")
+ buf.write("\3\2\2\2>\u01cf\3\2\2\2@\u01d6\3\2\2\2B\u01ea\3\2\2\2")
+ buf.write("D\u01f4\3\2\2\2F\u01f6\3\2\2\2H\u01fe\3\2\2\2J\u020c\3")
+ buf.write("\2\2\2L\u0216\3\2\2\2N\u0228\3\2\2\2P\u0239\3\2\2\2R\u023b")
+ buf.write("\3\2\2\2T\u025d\3\2\2\2V\u0265\3\2\2\2X\u026d\3\2\2\2")
+ buf.write("Z\u0289\3\2\2\2\\\u028b\3\2\2\2^\u0293\3\2\2\2`\u029a")
+ buf.write("\3\2\2\2b\u029c\3\2\2\2d\u029e\3\2\2\2f\u02a0\3\2\2\2")
+ buf.write("h\u02a9\3\2\2\2j\u02b1\3\2\2\2l\u02b9\3\2\2\2n\u02c1\3")
+ buf.write("\2\2\2p\u02c9\3\2\2\2r\u02d1\3\2\2\2t\u02d9\3\2\2\2v\u02e1")
+ buf.write("\3\2\2\2x\u02f4\3\2\2\2z\u02f7\3\2\2\2|\u0304\3\2\2\2")
+ buf.write("~\u030e\3\2\2\2\u0080\u0318\3\2\2\2\u0082\u0333\3\2\2")
+ buf.write("\2\u0084\u0335\3\2\2\2\u0086\u0342\3\2\2\2\u0088\u034a")
+ buf.write("\3\2\2\2\u008a\u035c\3\2\2\2\u008c\u036e\3\2\2\2\u008e")
+ buf.write("\u037d\3\2\2\2\u0090\u0092\5\4\3\2\u0091\u0090\3\2\2\2")
+ buf.write("\u0092\u0095\3\2\2\2\u0093\u0091\3\2\2\2\u0093\u0094\3")
+ buf.write("\2\2\2\u0094\3\3\2\2\2\u0095\u0093\3\2\2\2\u0096\u0098")
+ buf.write("\5\b\5\2\u0097\u0096\3\2\2\2\u0097\u0098\3\2\2\2\u0098")
+ buf.write("\u0099\3\2\2\2\u0099\u009d\5,\27\2\u009a\u009c\5\n\6\2")
+ buf.write("\u009b\u009a\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u009b\3")
+ buf.write("\2\2\2\u009d\u009e\3\2\2\2\u009e\u00a0\3\2\2\2\u009f\u009d")
+ buf.write("\3\2\2\2\u00a0\u00a1\7\3\2\2\u00a1\u00a9\3\2\2\2\u00a2")
+ buf.write("\u00a9\5\6\4\2\u00a3\u00a9\5\n\6\2\u00a4\u00a6\5\u0080")
+ buf.write("A\2\u00a5\u00a7\7\4\2\2\u00a6\u00a5\3\2\2\2\u00a6\u00a7")
+ buf.write("\3\2\2\2\u00a7\u00a9\3\2\2\2\u00a8\u0097\3\2\2\2\u00a8")
+ buf.write("\u00a2\3\2\2\2\u00a8\u00a3\3\2\2\2\u00a8\u00a4\3\2\2\2")
+ buf.write("\u00a9\5\3\2\2\2\u00aa\u00ac\5\b\5\2\u00ab\u00aa\3\2\2")
+ buf.write("\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b6")
+ buf.write("\5,\27\2\u00ae\u00b0\5\n\6\2\u00af\u00ae\3\2\2\2\u00b0")
+ buf.write("\u00b1\3\2\2\2\u00b1\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2")
+ buf.write("\u00b2\u00b3\3\2\2\2\u00b3\u00b4\5\u0084C\2\u00b4\u00b7")
+ buf.write("\3\2\2\2\u00b5\u00b7\5\u0084C\2\u00b6\u00af\3\2\2\2\u00b6")
+ buf.write("\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00b9\b\4\1\2")
+ buf.write("\u00b9\7\3\2\2\2\u00ba\u00be\5\20\t\2\u00bb\u00be\5\22")
+ buf.write("\n\2\u00bc\u00be\5*\26\2\u00bd\u00ba\3\2\2\2\u00bd\u00bb")
+ buf.write("\3\2\2\2\u00bd\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf")
+ buf.write("\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\t\3\2\2\2\u00c1")
+ buf.write("\u00c3\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2\2")
+ buf.write("\u00c3\u00c4\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c6\5")
+ buf.write("\f\7\2\u00c6\u00c7\7\4\2\2\u00c7\u00c8\b\6\1\2\u00c8\u00d1")
+ buf.write("\3\2\2\2\u00c9\u00cb\5\b\5\2\u00ca\u00cc\5\f\7\2\u00cb")
+ buf.write("\u00ca\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cd\3\2\2\2")
+ buf.write("\u00cd\u00ce\7\4\2\2\u00ce\u00cf\b\6\1\2\u00cf\u00d1\3")
+ buf.write("\2\2\2\u00d0\u00c1\3\2\2\2\u00d0\u00c9\3\2\2\2\u00d1\13")
+ buf.write("\3\2\2\2\u00d2\u00d7\5\16\b\2\u00d3\u00d4\7\6\2\2\u00d4")
+ buf.write("\u00d6\5\16\b\2\u00d5\u00d3\3\2\2\2\u00d6\u00d9\3\2\2")
+ buf.write("\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\r\3\2")
+ buf.write("\2\2\u00d9\u00d7\3\2\2\2\u00da\u00dd\5,\27\2\u00db\u00dc")
+ buf.write("\7\7\2\2\u00dc\u00de\5D#\2\u00dd\u00db\3\2\2\2\u00dd\u00de")
+ buf.write("\3\2\2\2\u00de\17\3\2\2\2\u00df\u00e0\t\2\2\2\u00e0\21")
+ buf.write("\3\2\2\2\u00e1\u00fa\7\r\2\2\u00e2\u00fa\7\16\2\2\u00e3")
+ buf.write("\u00fa\7\17\2\2\u00e4\u00fa\7\20\2\2\u00e5\u00fa\7\21")
+ buf.write("\2\2\u00e6\u00fa\7\22\2\2\u00e7\u00fa\7\23\2\2\u00e8\u00fa")
+ buf.write("\7\24\2\2\u00e9\u00fa\7\25\2\2\u00ea\u00eb\5\26\f\2\u00eb")
+ buf.write("\u00ec\b\n\1\2\u00ec\u00fa\3\2\2\2\u00ed\u00ee\5$\23\2")
+ buf.write("\u00ee\u00ef\b\n\1\2\u00ef\u00fa\3\2\2\2\u00f0\u00f4\7")
+ buf.write("_\2\2\u00f1\u00f3\5*\26\2\u00f2\u00f1\3\2\2\2\u00f3\u00f6")
+ buf.write("\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4\u00f5\3\2\2\2\u00f5")
+ buf.write("\u00f7\3\2\2\2\u00f6\u00f4\3\2\2\2\u00f7\u00fa\5,\27\2")
+ buf.write("\u00f8\u00fa\5\24\13\2\u00f9\u00e1\3\2\2\2\u00f9\u00e2")
+ buf.write("\3\2\2\2\u00f9\u00e3\3\2\2\2\u00f9\u00e4\3\2\2\2\u00f9")
+ buf.write("\u00e5\3\2\2\2\u00f9\u00e6\3\2\2\2\u00f9\u00e7\3\2\2\2")
+ buf.write("\u00f9\u00e8\3\2\2\2\u00f9\u00e9\3\2\2\2\u00f9\u00ea\3")
+ buf.write("\2\2\2\u00f9\u00ed\3\2\2\2\u00f9\u00f0\3\2\2\2\u00f9\u00f8")
+ buf.write("\3\2\2\2\u00fa\23\3\2\2\2\u00fb\u00fc\7_\2\2\u00fc\25")
+ buf.write("\3\2\2\2\u00fd\u00ff\5\30\r\2\u00fe\u0100\7_\2\2\u00ff")
+ buf.write("\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\u0101\3\2\2\2")
+ buf.write("\u0101\u0102\7\3\2\2\u0102\u0103\5\32\16\2\u0103\u0104")
+ buf.write("\7\26\2\2\u0104\u0109\3\2\2\2\u0105\u0106\5\30\r\2\u0106")
+ buf.write("\u0107\7_\2\2\u0107\u0109\3\2\2\2\u0108\u00fd\3\2\2\2")
+ buf.write("\u0108\u0105\3\2\2\2\u0109\27\3\2\2\2\u010a\u010b\t\3")
+ buf.write("\2\2\u010b\31\3\2\2\2\u010c\u010e\5\34\17\2\u010d\u010c")
+ buf.write("\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u010d\3\2\2\2\u010f")
+ buf.write("\u0110\3\2\2\2\u0110\33\3\2\2\2\u0111\u0112\5\36\20\2")
+ buf.write("\u0112\u0113\5 \21\2\u0113\u0114\7\4\2\2\u0114\35\3\2")
+ buf.write("\2\2\u0115\u0118\5*\26\2\u0116\u0118\5\22\n\2\u0117\u0115")
+ buf.write("\3\2\2\2\u0117\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119")
+ buf.write("\u0117\3\2\2\2\u0119\u011a\3\2\2\2\u011a\37\3\2\2\2\u011b")
+ buf.write("\u0120\5\"\22\2\u011c\u011d\7\6\2\2\u011d\u011f\5\"\22")
+ buf.write("\2\u011e\u011c\3\2\2\2\u011f\u0122\3\2\2\2\u0120\u011e")
+ buf.write("\3\2\2\2\u0120\u0121\3\2\2\2\u0121!\3\2\2\2\u0122\u0120")
+ buf.write("\3\2\2\2\u0123\u0126\5,\27\2\u0124\u0125\7\31\2\2\u0125")
+ buf.write("\u0127\5^\60\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2")
+ buf.write("\u0127\u012b\3\2\2\2\u0128\u0129\7\31\2\2\u0129\u012b")
+ buf.write("\5^\60\2\u012a\u0123\3\2\2\2\u012a\u0128\3\2\2\2\u012b")
+ buf.write("#\3\2\2\2\u012c\u012d\7\32\2\2\u012d\u012e\7\3\2\2\u012e")
+ buf.write("\u0130\5&\24\2\u012f\u0131\7\6\2\2\u0130\u012f\3\2\2\2")
+ buf.write("\u0130\u0131\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u0133\7")
+ buf.write("\26\2\2\u0133\u0140\3\2\2\2\u0134\u0135\7\32\2\2\u0135")
+ buf.write("\u0136\7_\2\2\u0136\u0137\7\3\2\2\u0137\u0139\5&\24\2")
+ buf.write("\u0138\u013a\7\6\2\2\u0139\u0138\3\2\2\2\u0139\u013a\3")
+ buf.write("\2\2\2\u013a\u013b\3\2\2\2\u013b\u013c\7\26\2\2\u013c")
+ buf.write("\u0140\3\2\2\2\u013d\u013e\7\32\2\2\u013e\u0140\7_\2\2")
+ buf.write("\u013f\u012c\3\2\2\2\u013f\u0134\3\2\2\2\u013f\u013d\3")
+ buf.write("\2\2\2\u0140%\3\2\2\2\u0141\u0146\5(\25\2\u0142\u0143")
+ buf.write("\7\6\2\2\u0143\u0145\5(\25\2\u0144\u0142\3\2\2\2\u0145")
+ buf.write("\u0148\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0147\3\2\2\2")
+ buf.write("\u0147\'\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u014c\7_\2")
+ buf.write("\2\u014a\u014b\7\7\2\2\u014b\u014d\5^\60\2\u014c\u014a")
+ buf.write("\3\2\2\2\u014c\u014d\3\2\2\2\u014d)\3\2\2\2\u014e\u014f")
+ buf.write("\t\4\2\2\u014f+\3\2\2\2\u0150\u0152\5\62\32\2\u0151\u0150")
+ buf.write("\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0154\3\2\2\2\u0153")
+ buf.write("\u0155\7$\2\2\u0154\u0153\3\2\2\2\u0154\u0155\3\2\2\2")
+ buf.write("\u0155\u0157\3\2\2\2\u0156\u0158\7%\2\2\u0157\u0156\3")
+ buf.write("\2\2\2\u0157\u0158\3\2\2\2\u0158\u015a\3\2\2\2\u0159\u015b")
+ buf.write("\7&\2\2\u015a\u0159\3\2\2\2\u015a\u015b\3\2\2\2\u015b")
+ buf.write("\u015c\3\2\2\2\u015c\u015f\5.\30\2\u015d\u015f\5\62\32")
+ buf.write("\2\u015e\u0151\3\2\2\2\u015e\u015d\3\2\2\2\u015f-\3\2")
+ buf.write("\2\2\u0160\u0164\7_\2\2\u0161\u0163\5\60\31\2\u0162\u0161")
+ buf.write("\3\2\2\2\u0163\u0166\3\2\2\2\u0164\u0162\3\2\2\2\u0164")
+ buf.write("\u0165\3\2\2\2\u0165\u0173\3\2\2\2\u0166\u0164\3\2\2\2")
+ buf.write("\u0167\u0169\7(\2\2\u0168\u016a\7$\2\2\u0169\u0168\3\2")
+ buf.write("\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u016c")
+ buf.write("\5,\27\2\u016c\u016e\7)\2\2\u016d\u016f\5\60\31\2\u016e")
+ buf.write("\u016d\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u016e\3\2\2\2")
+ buf.write("\u0170\u0171\3\2\2\2\u0171\u0173\3\2\2\2\u0172\u0160\3")
+ buf.write("\2\2\2\u0172\u0167\3\2\2\2\u0173/\3\2\2\2\u0174\u0175")
+ buf.write("\7*\2\2\u0175\u0176\5^\60\2\u0176\u0177\7+\2\2\u0177\u0185")
+ buf.write("\3\2\2\2\u0178\u0179\7*\2\2\u0179\u0185\7+\2\2\u017a\u017b")
+ buf.write("\7(\2\2\u017b\u017c\5\64\33\2\u017c\u017d\7)\2\2\u017d")
+ buf.write("\u0185\3\2\2\2\u017e\u017f\7(\2\2\u017f\u0180\5:\36\2")
+ buf.write("\u0180\u0181\7)\2\2\u0181\u0185\3\2\2\2\u0182\u0183\7")
+ buf.write("(\2\2\u0183\u0185\7)\2\2\u0184\u0174\3\2\2\2\u0184\u0178")
+ buf.write("\3\2\2\2\u0184\u017a\3\2\2\2\u0184\u017e\3\2\2\2\u0184")
+ buf.write("\u0182\3\2\2\2\u0185\61\3\2\2\2\u0186\u0188\7,\2\2\u0187")
+ buf.write("\u0189\5*\26\2\u0188\u0187\3\2\2\2\u0189\u018a\3\2\2\2")
+ buf.write("\u018a\u0188\3\2\2\2\u018a\u018b\3\2\2\2\u018b\u018d\3")
+ buf.write("\2\2\2\u018c\u018e\5\62\32\2\u018d\u018c\3\2\2\2\u018d")
+ buf.write("\u018e\3\2\2\2\u018e\u0193\3\2\2\2\u018f\u0190\7,\2\2")
+ buf.write("\u0190\u0193\5\62\32\2\u0191\u0193\7,\2\2\u0192\u0186")
+ buf.write("\3\2\2\2\u0192\u018f\3\2\2\2\u0192\u0191\3\2\2\2\u0193")
+ buf.write("\63\3\2\2\2\u0194\u019a\5\66\34\2\u0195\u0197\7\6\2\2")
+ buf.write("\u0196\u0198\7\37\2\2\u0197\u0196\3\2\2\2\u0197\u0198")
+ buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\7-\2\2\u019a")
+ buf.write("\u0195\3\2\2\2\u019a\u019b\3\2\2\2\u019b\65\3\2\2\2\u019c")
+ buf.write("\u01a4\58\35\2\u019d\u019f\7\6\2\2\u019e\u01a0\7\37\2")
+ buf.write("\2\u019f\u019e\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a1")
+ buf.write("\3\2\2\2\u01a1\u01a3\58\35\2\u01a2\u019d\3\2\2\2\u01a3")
+ buf.write("\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2")
+ buf.write("\u01a5\67\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u01ac\5\b")
+ buf.write("\5\2\u01a8\u01ab\5,\27\2\u01a9\u01ab\5> \2\u01aa\u01a8")
+ buf.write("\3\2\2\2\u01aa\u01a9\3\2\2\2\u01ab\u01ae\3\2\2\2\u01ac")
+ buf.write("\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01b0\3\2\2\2")
+ buf.write("\u01ae\u01ac\3\2\2\2\u01af\u01b1\7\37\2\2\u01b0\u01af")
+ buf.write("\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01ba\3\2\2\2\u01b2")
+ buf.write("\u01b4\5\62\32\2\u01b3\u01b2\3\2\2\2\u01b4\u01b7\3\2\2")
+ buf.write("\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8")
+ buf.write("\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01ba\7_\2\2\u01b9")
+ buf.write("\u01a7\3\2\2\2\u01b9\u01b5\3\2\2\2\u01ba9\3\2\2\2\u01bb")
+ buf.write("\u01c0\7_\2\2\u01bc\u01bd\7\6\2\2\u01bd\u01bf\7_\2\2\u01be")
+ buf.write("\u01bc\3\2\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01be\3\2\2\2")
+ buf.write("\u01c0\u01c1\3\2\2\2\u01c1;\3\2\2\2\u01c2\u01c0\3\2\2")
+ buf.write("\2\u01c3\u01c5\5\36\20\2\u01c4\u01c6\5> \2\u01c5\u01c4")
+ buf.write("\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c9\3\2\2\2\u01c7")
+ buf.write("\u01c9\5\24\13\2\u01c8\u01c3\3\2\2\2\u01c8\u01c7\3\2\2")
+ buf.write("\2\u01c9=\3\2\2\2\u01ca\u01cc\5\62\32\2\u01cb\u01cd\5")
+ buf.write("@!\2\u01cc\u01cb\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01d0")
+ buf.write("\3\2\2\2\u01ce\u01d0\5@!\2\u01cf\u01ca\3\2\2\2\u01cf\u01ce")
+ buf.write("\3\2\2\2\u01d0?\3\2\2\2\u01d1\u01d2\7(\2\2\u01d2\u01d3")
+ buf.write("\5> \2\u01d3\u01d4\7)\2\2\u01d4\u01d7\3\2\2\2\u01d5\u01d7")
+ buf.write("\5B\"\2\u01d6\u01d1\3\2\2\2\u01d6\u01d5\3\2\2\2\u01d7")
+ buf.write("\u01db\3\2\2\2\u01d8\u01da\5B\"\2\u01d9\u01d8\3\2\2\2")
+ buf.write("\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01db\u01dc\3")
+ buf.write("\2\2\2\u01dcA\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01df")
+ buf.write("\7*\2\2\u01df\u01eb\7+\2\2\u01e0\u01e1\7*\2\2\u01e1\u01e2")
+ buf.write("\5^\60\2\u01e2\u01e3\7+\2\2\u01e3\u01eb\3\2\2\2\u01e4")
+ buf.write("\u01e5\7(\2\2\u01e5\u01eb\7)\2\2\u01e6\u01e7\7(\2\2\u01e7")
+ buf.write("\u01e8\5\64\33\2\u01e8\u01e9\7)\2\2\u01e9\u01eb\3\2\2")
+ buf.write("\2\u01ea\u01de\3\2\2\2\u01ea\u01e0\3\2\2\2\u01ea\u01e4")
+ buf.write("\3\2\2\2\u01ea\u01e6\3\2\2\2\u01ebC\3\2\2\2\u01ec\u01f5")
+ buf.write("\5`\61\2\u01ed\u01ee\7\3\2\2\u01ee\u01f0\5F$\2\u01ef\u01f1")
+ buf.write("\7\6\2\2\u01f0\u01ef\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1")
+ buf.write("\u01f2\3\2\2\2\u01f2\u01f3\7\26\2\2\u01f3\u01f5\3\2\2")
+ buf.write("\2\u01f4\u01ec\3\2\2\2\u01f4\u01ed\3\2\2\2\u01f5E\3\2")
+ buf.write("\2\2\u01f6\u01fb\5D#\2\u01f7\u01f8\7\6\2\2\u01f8\u01fa")
+ buf.write("\5D#\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd\3\2\2\2\u01fb\u01f9")
+ buf.write("\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fcG\3\2\2\2\u01fd\u01fb")
+ buf.write("\3\2\2\2\u01fe\u0200\5`\61\2\u01ff\u0201\7\37\2\2\u0200")
+ buf.write("\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0209\3\2\2\2")
+ buf.write("\u0202\u0203\7\6\2\2\u0203\u0205\5`\61\2\u0204\u0206\7")
+ buf.write("\37\2\2\u0205\u0204\3\2\2\2\u0205\u0206\3\2\2\2\u0206")
+ buf.write("\u0208\3\2\2\2\u0207\u0202\3\2\2\2\u0208\u020b\3\2\2\2")
+ buf.write("\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020aI\3\2\2")
+ buf.write("\2\u020b\u0209\3\2\2\2\u020c\u0213\5L\'\2\u020d\u020e")
+ buf.write("\7.\2\2\u020e\u0212\5L\'\2\u020f\u0210\7/\2\2\u0210\u0212")
+ buf.write("\5L\'\2\u0211\u020d\3\2\2\2\u0211\u020f\3\2\2\2\u0212")
+ buf.write("\u0215\3\2\2\2\u0213\u0211\3\2\2\2\u0213\u0214\3\2\2\2")
+ buf.write("\u0214K\3\2\2\2\u0215\u0213\3\2\2\2\u0216\u021f\5N(\2")
+ buf.write("\u0217\u0218\7,\2\2\u0218\u021e\5N(\2\u0219\u021a\7\60")
+ buf.write("\2\2\u021a\u021e\5N(\2\u021b\u021c\7\61\2\2\u021c\u021e")
+ buf.write("\5N(\2\u021d\u0217\3\2\2\2\u021d\u0219\3\2\2\2\u021d\u021b")
+ buf.write("\3\2\2\2\u021e\u0221\3\2\2\2\u021f\u021d\3\2\2\2\u021f")
+ buf.write("\u0220\3\2\2\2\u0220M\3\2\2\2\u0221\u021f\3\2\2\2\u0222")
+ buf.write("\u0223\7(\2\2\u0223\u0224\5<\37\2\u0224\u0225\7)\2\2\u0225")
+ buf.write("\u0226\5N(\2\u0226\u0229\3\2\2\2\u0227\u0229\5P)\2\u0228")
+ buf.write("\u0222\3\2\2\2\u0228\u0227\3\2\2\2\u0229O\3\2\2\2\u022a")
+ buf.write("\u023a\5R*\2\u022b\u022c\7\62\2\2\u022c\u023a\5P)\2\u022d")
+ buf.write("\u022e\7\63\2\2\u022e\u023a\5P)\2\u022f\u0230\5V,\2\u0230")
+ buf.write("\u0231\5N(\2\u0231\u023a\3\2\2\2\u0232\u0233\7\64\2\2")
+ buf.write("\u0233\u023a\5P)\2\u0234\u0235\7\64\2\2\u0235\u0236\7")
+ buf.write("(\2\2\u0236\u0237\5<\37\2\u0237\u0238\7)\2\2\u0238\u023a")
+ buf.write("\3\2\2\2\u0239\u022a\3\2\2\2\u0239\u022b\3\2\2\2\u0239")
+ buf.write("\u022d\3\2\2\2\u0239\u022f\3\2\2\2\u0239\u0232\3\2\2\2")
+ buf.write("\u0239\u0234\3\2\2\2\u023aQ\3\2\2\2\u023b\u023c\5X-\2")
+ buf.write("\u023c\u025a\b*\1\2\u023d\u023e\7*\2\2\u023e\u023f\5\\")
+ buf.write("/\2\u023f\u0240\7+\2\2\u0240\u0259\3\2\2\2\u0241\u0242")
+ buf.write("\7(\2\2\u0242\u0243\7)\2\2\u0243\u0259\b*\1\2\u0244\u0245")
+ buf.write("\7(\2\2\u0245\u0246\5H%\2\u0246\u0247\7)\2\2\u0247\u0248")
+ buf.write("\b*\1\2\u0248\u0259\3\2\2\2\u0249\u024a\7(\2\2\u024a\u024b")
+ buf.write("\5T+\2\u024b\u024c\7)\2\2\u024c\u0259\3\2\2\2\u024d\u024e")
+ buf.write("\7\65\2\2\u024e\u024f\7_\2\2\u024f\u0259\b*\1\2\u0250")
+ buf.write("\u0251\7,\2\2\u0251\u0252\7_\2\2\u0252\u0259\b*\1\2\u0253")
+ buf.write("\u0254\7\66\2\2\u0254\u0255\7_\2\2\u0255\u0259\b*\1\2")
+ buf.write("\u0256\u0259\7\62\2\2\u0257\u0259\7\63\2\2\u0258\u023d")
+ buf.write("\3\2\2\2\u0258\u0241\3\2\2\2\u0258\u0244\3\2\2\2\u0258")
+ buf.write("\u0249\3\2\2\2\u0258\u024d\3\2\2\2\u0258\u0250\3\2\2\2")
+ buf.write("\u0258\u0253\3\2\2\2\u0258\u0256\3\2\2\2\u0258\u0257\3")
+ buf.write("\2\2\2\u0259\u025c\3\2\2\2\u025a\u0258\3\2\2\2\u025a\u025b")
+ buf.write("\3\2\2\2\u025bS\3\2\2\2\u025c\u025a\3\2\2\2\u025d\u0262")
+ buf.write("\58\35\2\u025e\u025f\7\6\2\2\u025f\u0261\58\35\2\u0260")
+ buf.write("\u025e\3\2\2\2\u0261\u0264\3\2\2\2\u0262\u0260\3\2\2\2")
+ buf.write("\u0262\u0263\3\2\2\2\u0263U\3\2\2\2\u0264\u0262\3\2\2")
+ buf.write("\2\u0265\u0266\t\5\2\2\u0266W\3\2\2\2\u0267\u026e\7_\2")
+ buf.write("\2\u0268\u026e\5Z.\2\u0269\u026a\7(\2\2\u026a\u026b\5")
+ buf.write("\\/\2\u026b\u026c\7)\2\2\u026c\u026e\3\2\2\2\u026d\u0267")
+ buf.write("\3\2\2\2\u026d\u0268\3\2\2\2\u026d\u0269\3\2\2\2\u026e")
+ buf.write("Y\3\2\2\2\u026f\u028a\7b\2\2\u0270\u028a\7d\2\2\u0271")
+ buf.write("\u028a\7c\2\2\u0272\u028a\7`\2\2\u0273\u0275\7_\2\2\u0274")
+ buf.write("\u0273\3\2\2\2\u0275\u0278\3\2\2\2\u0276\u0274\3\2\2\2")
+ buf.write("\u0276\u0277\3\2\2\2\u0277\u027a\3\2\2\2\u0278\u0276\3")
+ buf.write("\2\2\2\u0279\u027b\7a\2\2\u027a\u0279\3\2\2\2\u027b\u027c")
+ buf.write("\3\2\2\2\u027c\u027a\3\2\2\2\u027c\u027d\3\2\2\2\u027d")
+ buf.write("\u027f\3\2\2\2\u027e\u0276\3\2\2\2\u027f\u0280\3\2\2\2")
+ buf.write("\u0280\u027e\3\2\2\2\u0280\u0281\3\2\2\2\u0281\u0285\3")
+ buf.write("\2\2\2\u0282\u0284\7_\2\2\u0283\u0282\3\2\2\2\u0284\u0287")
+ buf.write("\3\2\2\2\u0285\u0283\3\2\2\2\u0285\u0286\3\2\2\2\u0286")
+ buf.write("\u028a\3\2\2\2\u0287\u0285\3\2\2\2\u0288\u028a\7e\2\2")
+ buf.write("\u0289\u026f\3\2\2\2\u0289\u0270\3\2\2\2\u0289\u0271\3")
+ buf.write("\2\2\2\u0289\u0272\3\2\2\2\u0289\u027e\3\2\2\2\u0289\u0288")
+ buf.write("\3\2\2\2\u028a[\3\2\2\2\u028b\u0290\5`\61\2\u028c\u028d")
+ buf.write("\7\6\2\2\u028d\u028f\5`\61\2\u028e\u028c\3\2\2\2\u028f")
+ buf.write("\u0292\3\2\2\2\u0290\u028e\3\2\2\2\u0290\u0291\3\2\2\2")
+ buf.write("\u0291]\3\2\2\2\u0292\u0290\3\2\2\2\u0293\u0294\5f\64")
+ buf.write("\2\u0294_\3\2\2\2\u0295\u0296\5b\62\2\u0296\u0297\5d\63")
+ buf.write("\2\u0297\u0298\5`\61\2\u0298\u029b\3\2\2\2\u0299\u029b")
+ buf.write("\5f\64\2\u029a\u0295\3\2\2\2\u029a\u0299\3\2\2\2\u029b")
+ buf.write("a\3\2\2\2\u029c\u029d\5P)\2\u029dc\3\2\2\2\u029e\u029f")
+ buf.write("\t\6\2\2\u029fe\3\2\2\2\u02a0\u02a7\5h\65\2\u02a1\u02a2")
+ buf.write("\7D\2\2\u02a2\u02a3\5\\/\2\u02a3\u02a4\7\31\2\2\u02a4")
+ buf.write("\u02a5\5f\64\2\u02a5\u02a6\b\64\1\2\u02a6\u02a8\3\2\2")
+ buf.write("\2\u02a7\u02a1\3\2\2\2\u02a7\u02a8\3\2\2\2\u02a8g\3\2")
+ buf.write("\2\2\u02a9\u02ae\5j\66\2\u02aa\u02ab\7E\2\2\u02ab\u02ad")
+ buf.write("\5j\66\2\u02ac\u02aa\3\2\2\2\u02ad\u02b0\3\2\2\2\u02ae")
+ buf.write("\u02ac\3\2\2\2\u02ae\u02af\3\2\2\2\u02afi\3\2\2\2\u02b0")
+ buf.write("\u02ae\3\2\2\2\u02b1\u02b6\5l\67\2\u02b2\u02b3\7F\2\2")
+ buf.write("\u02b3\u02b5\5l\67\2\u02b4\u02b2\3\2\2\2\u02b5\u02b8\3")
+ buf.write("\2\2\2\u02b6\u02b4\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7k")
+ buf.write("\3\2\2\2\u02b8\u02b6\3\2\2\2\u02b9\u02be\5n8\2\u02ba\u02bb")
+ buf.write("\7G\2\2\u02bb\u02bd\5n8\2\u02bc\u02ba\3\2\2\2\u02bd\u02c0")
+ buf.write("\3\2\2\2\u02be\u02bc\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf")
+ buf.write("m\3\2\2\2\u02c0\u02be\3\2\2\2\u02c1\u02c6\5p9\2\u02c2")
+ buf.write("\u02c3\7H\2\2\u02c3\u02c5\5p9\2\u02c4\u02c2\3\2\2\2\u02c5")
+ buf.write("\u02c8\3\2\2\2\u02c6\u02c4\3\2\2\2\u02c6\u02c7\3\2\2\2")
+ buf.write("\u02c7o\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c9\u02ce\5r:\2")
+ buf.write("\u02ca\u02cb\7\67\2\2\u02cb\u02cd\5r:\2\u02cc\u02ca\3")
+ buf.write("\2\2\2\u02cd\u02d0\3\2\2\2\u02ce\u02cc\3\2\2\2\u02ce\u02cf")
+ buf.write("\3\2\2\2\u02cfq\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d6")
+ buf.write("\5t;\2\u02d2\u02d3\t\7\2\2\u02d3\u02d5\5t;\2\u02d4\u02d2")
+ buf.write("\3\2\2\2\u02d5\u02d8\3\2\2\2\u02d6\u02d4\3\2\2\2\u02d6")
+ buf.write("\u02d7\3\2\2\2\u02d7s\3\2\2\2\u02d8\u02d6\3\2\2\2\u02d9")
+ buf.write("\u02de\5v<\2\u02da\u02db\t\b\2\2\u02db\u02dd\5v<\2\u02dc")
+ buf.write("\u02da\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2")
+ buf.write("\u02de\u02df\3\2\2\2\u02dfu\3\2\2\2\u02e0\u02de\3\2\2")
+ buf.write("\2\u02e1\u02e6\5J&\2\u02e2\u02e3\t\t\2\2\u02e3\u02e5\5")
+ buf.write("J&\2\u02e4\u02e2\3\2\2\2\u02e5\u02e8\3\2\2\2\u02e6\u02e4")
+ buf.write("\3\2\2\2\u02e6\u02e7\3\2\2\2\u02e7w\3\2\2\2\u02e8\u02e6")
+ buf.write("\3\2\2\2\u02e9\u02f5\5\u0082B\2\u02ea\u02f5\5\u0084C\2")
+ buf.write("\u02eb\u02f5\5\u0088E\2\u02ec\u02f5\5\u008aF\2\u02ed\u02f5")
+ buf.write("\5\u008cG\2\u02ee\u02f5\5\u008eH\2\u02ef\u02f5\5\u0080")
+ buf.write("A\2\u02f0\u02f5\5z>\2\u02f1\u02f5\5|?\2\u02f2\u02f5\5")
+ buf.write("~@\2\u02f3\u02f5\5\n\6\2\u02f4\u02e9\3\2\2\2\u02f4\u02ea")
+ buf.write("\3\2\2\2\u02f4\u02eb\3\2\2\2\u02f4\u02ec\3\2\2\2\u02f4")
+ buf.write("\u02ed\3\2\2\2\u02f4\u02ee\3\2\2\2\u02f4\u02ef\3\2\2\2")
+ buf.write("\u02f4\u02f0\3\2\2\2\u02f4\u02f1\3\2\2\2\u02f4\u02f2\3")
+ buf.write("\2\2\2\u02f4\u02f3\3\2\2\2\u02f5y\3\2\2\2\u02f6\u02f8")
+ buf.write("\7Q\2\2\u02f7\u02f6\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8")
+ buf.write("\u02f9\3\2\2\2\u02f9\u02fa\7_\2\2\u02fa\u02fe\7(\2\2\u02fb")
+ buf.write("\u02fd\n\n\2\2\u02fc\u02fb\3\2\2\2\u02fd\u0300\3\2\2\2")
+ buf.write("\u02fe\u02fc\3\2\2\2\u02fe\u02ff\3\2\2\2\u02ff\u0301\3")
+ buf.write("\2\2\2\u0300\u02fe\3\2\2\2\u0301\u0302\7)\2\2\u0302\u0303")
+ buf.write("\7\4\2\2\u0303{\3\2\2\2\u0304\u0305\7R\2\2\u0305\u0309")
+ buf.write("\7\3\2\2\u0306\u0308\n\13\2\2\u0307\u0306\3\2\2\2\u0308")
+ buf.write("\u030b\3\2\2\2\u0309\u0307\3\2\2\2\u0309\u030a\3\2\2\2")
+ buf.write("\u030a\u030c\3\2\2\2\u030b\u0309\3\2\2\2\u030c\u030d\7")
+ buf.write("\26\2\2\u030d}\3\2\2\2\u030e\u030f\7S\2\2\u030f\u0313")
+ buf.write("\7\3\2\2\u0310\u0312\n\13\2\2\u0311\u0310\3\2\2\2\u0312")
+ buf.write("\u0315\3\2\2\2\u0313\u0311\3\2\2\2\u0313\u0314\3\2\2\2")
+ buf.write("\u0314\u0316\3\2\2\2\u0315\u0313\3\2\2\2\u0316\u0317\7")
+ buf.write("\26\2\2\u0317\177\3\2\2\2\u0318\u0319\7_\2\2\u0319\u031d")
+ buf.write("\7(\2\2\u031a\u031c\5\n\6\2\u031b\u031a\3\2\2\2\u031c")
+ buf.write("\u031f\3\2\2\2\u031d\u031b\3\2\2\2\u031d\u031e\3\2\2\2")
+ buf.write("\u031e\u0321\3\2\2\2\u031f\u031d\3\2\2\2\u0320\u0322\5")
+ buf.write("\u0086D\2\u0321\u0320\3\2\2\2\u0321\u0322\3\2\2\2\u0322")
+ buf.write("\u0324\3\2\2\2\u0323\u0325\5\\/\2\u0324\u0323\3\2\2\2")
+ buf.write("\u0324\u0325\3\2\2\2\u0325\u0326\3\2\2\2\u0326\u0327\7")
+ buf.write(")\2\2\u0327\u0081\3\2\2\2\u0328\u0329\7_\2\2\u0329\u032a")
+ buf.write("\7\31\2\2\u032a\u0334\5x=\2\u032b\u032c\7T\2\2\u032c\u032d")
+ buf.write("\5^\60\2\u032d\u032e\7\31\2\2\u032e\u032f\5x=\2\u032f")
+ buf.write("\u0334\3\2\2\2\u0330\u0331\7U\2\2\u0331\u0332\7\31\2\2")
+ buf.write("\u0332\u0334\5x=\2\u0333\u0328\3\2\2\2\u0333\u032b\3\2")
+ buf.write("\2\2\u0333\u0330\3\2\2\2\u0334\u0083\3\2\2\2\u0335\u0339")
+ buf.write("\7\3\2\2\u0336\u0338\5\n\6\2\u0337\u0336\3\2\2\2\u0338")
+ buf.write("\u033b\3\2\2\2\u0339\u0337\3\2\2\2\u0339\u033a\3\2\2\2")
+ buf.write("\u033a\u033d\3\2\2\2\u033b\u0339\3\2\2\2\u033c\u033e\5")
+ buf.write("\u0086D\2\u033d\u033c\3\2\2\2\u033d\u033e\3\2\2\2\u033e")
+ buf.write("\u033f\3\2\2\2\u033f\u0340\7\26\2\2\u0340\u0085\3\2\2")
+ buf.write("\2\u0341\u0343\5x=\2\u0342\u0341\3\2\2\2\u0343\u0344\3")
+ buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0344\u0345\3\2\2\2\u0345\u0087")
+ buf.write("\3\2\2\2\u0346\u034b\7\4\2\2\u0347\u0348\5\\/\2\u0348")
+ buf.write("\u0349\7\4\2\2\u0349\u034b\3\2\2\2\u034a\u0346\3\2\2\2")
+ buf.write("\u034a\u0347\3\2\2\2\u034b\u0089\3\2\2\2\u034c\u034d\7")
+ buf.write("V\2\2\u034d\u034e\7(\2\2\u034e\u034f\5\\/\2\u034f\u0350")
+ buf.write("\7)\2\2\u0350\u0351\bF\1\2\u0351\u0354\5x=\2\u0352\u0353")
+ buf.write("\7W\2\2\u0353\u0355\5x=\2\u0354\u0352\3\2\2\2\u0354\u0355")
+ buf.write("\3\2\2\2\u0355\u035d\3\2\2\2\u0356\u0357\7X\2\2\u0357")
+ buf.write("\u0358\7(\2\2\u0358\u0359\5\\/\2\u0359\u035a\7)\2\2\u035a")
+ buf.write("\u035b\5x=\2\u035b\u035d\3\2\2\2\u035c\u034c\3\2\2\2\u035c")
+ buf.write("\u0356\3\2\2\2\u035d\u008b\3\2\2\2\u035e\u035f\7Y\2\2")
+ buf.write("\u035f\u0360\7(\2\2\u0360\u0361\5\\/\2\u0361\u0362\7)")
+ buf.write("\2\2\u0362\u0363\5x=\2\u0363\u0364\bG\1\2\u0364\u036f")
+ buf.write("\3\2\2\2\u0365\u0366\7Z\2\2\u0366\u0367\5x=\2\u0367\u0368")
+ buf.write("\7Y\2\2\u0368\u0369\7(\2\2\u0369\u036a\5\\/\2\u036a\u036b")
+ buf.write("\7)\2\2\u036b\u036c\7\4\2\2\u036c\u036d\bG\1\2\u036d\u036f")
+ buf.write("\3\2\2\2\u036e\u035e\3\2\2\2\u036e\u0365\3\2\2\2\u036f")
+ buf.write("\u008d\3\2\2\2\u0370\u0371\7[\2\2\u0371\u0372\7_\2\2\u0372")
+ buf.write("\u037e\7\4\2\2\u0373\u0374\7\\\2\2\u0374\u037e\7\4\2\2")
+ buf.write("\u0375\u0376\7]\2\2\u0376\u037e\7\4\2\2\u0377\u0378\7")
+ buf.write("^\2\2\u0378\u037e\7\4\2\2\u0379\u037a\7^\2\2\u037a\u037b")
+ buf.write("\5\\/\2\u037b\u037c\7\4\2\2\u037c\u037e\3\2\2\2\u037d")
+ buf.write("\u0370\3\2\2\2\u037d\u0373\3\2\2\2\u037d\u0375\3\2\2\2")
+ buf.write("\u037d\u0377\3\2\2\2\u037d\u0379\3\2\2\2\u037e\u008f\3")
+ buf.write("\2\2\2o\u0093\u0097\u009d\u00a6\u00a8\u00ab\u00b1\u00b6")
+ buf.write("\u00bd\u00bf\u00c3\u00cb\u00d0\u00d7\u00dd\u00f4\u00f9")
+ buf.write("\u00ff\u0108\u010f\u0117\u0119\u0120\u0126\u012a\u0130")
+ buf.write("\u0139\u013f\u0146\u014c\u0151\u0154\u0157\u015a\u015e")
+ buf.write("\u0164\u0169\u0170\u0172\u0184\u018a\u018d\u0192\u0197")
+ buf.write("\u019a\u019f\u01a4\u01aa\u01ac\u01b0\u01b5\u01b9\u01c0")
+ buf.write("\u01c5\u01c8\u01cc\u01cf\u01d6\u01db\u01ea\u01f0\u01f4")
+ buf.write("\u01fb\u0200\u0205\u0209\u0211\u0213\u021d\u021f\u0228")
+ buf.write("\u0239\u0258\u025a\u0262\u026d\u0276\u027c\u0280\u0285")
+ buf.write("\u0289\u0290\u029a\u02a7\u02ae\u02b6\u02be\u02c6\u02ce")
+ buf.write("\u02d6\u02de\u02e6\u02f4\u02f7\u02fe\u0309\u0313\u031d")
+ buf.write("\u0321\u0324\u0333\u0339\u033d\u0344\u034a\u0354\u035c")
+ buf.write("\u036e\u037d")
+ return buf.getvalue()
+
+
+class CParser ( Parser ):
+
+ grammarFileName = "C.g4"
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ sharedContextCache = PredictionContextCache()
+
+ literalNames = [ "<INVALID>", "'{'", "';'", "'typedef'", "','", "'='",
+ "'extern'", "'static'", "'auto'", "'register'", "'STATIC'",
+ "'void'", "'char'", "'short'", "'int'", "'long'", "'float'",
+ "'double'", "'signed'", "'unsigned'", "'}'", "'struct'",
+ "'union'", "':'", "'enum'", "'const'", "'volatile'",
+ "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'",
+ "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'",
+ "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
+ "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'",
+ "'-'", "'/'", "'%'", "'++'", "'--'", "'sizeof'", "'.'",
+ "'->'", "'&'", "'~'", "'!'", "'*='", "'/='", "'%='",
+ "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='",
+ "'<'", "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'",
+ "'_asm'", "'__asm'", "'case'", "'default'", "'if'",
+ "'else'", "'switch'", "'while'", "'do'", "'goto'",
+ "'continue'", "'break'", "'return'" ]
+
+ symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL",
+ "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL",
+ "FLOATING_POINT_LITERAL", "WS", "BS", "UnicodeVocabulary",
+ "COMMENT", "LINE_COMMENT", "LINE_COMMAND" ]
+
+ RULE_translation_unit = 0
+ RULE_external_declaration = 1
+ RULE_function_definition = 2
+ RULE_declaration_specifiers = 3
+ RULE_declaration = 4
+ RULE_init_declarator_list = 5
+ RULE_init_declarator = 6
+ RULE_storage_class_specifier = 7
+ RULE_type_specifier = 8
+ RULE_type_id = 9
+ RULE_struct_or_union_specifier = 10
+ RULE_struct_or_union = 11
+ RULE_struct_declaration_list = 12
+ RULE_struct_declaration = 13
+ RULE_specifier_qualifier_list = 14
+ RULE_struct_declarator_list = 15
+ RULE_struct_declarator = 16
+ RULE_enum_specifier = 17
+ RULE_enumerator_list = 18
+ RULE_enumerator = 19
+ RULE_type_qualifier = 20
+ RULE_declarator = 21
+ RULE_direct_declarator = 22
+ RULE_declarator_suffix = 23
+ RULE_pointer = 24
+ RULE_parameter_type_list = 25
+ RULE_parameter_list = 26
+ RULE_parameter_declaration = 27
+ RULE_identifier_list = 28
+ RULE_type_name = 29
+ RULE_abstract_declarator = 30
+ RULE_direct_abstract_declarator = 31
+ RULE_abstract_declarator_suffix = 32
+ RULE_initializer = 33
+ RULE_initializer_list = 34
+ RULE_argument_expression_list = 35
+ RULE_additive_expression = 36
+ RULE_multiplicative_expression = 37
+ RULE_cast_expression = 38
+ RULE_unary_expression = 39
+ RULE_postfix_expression = 40
+ RULE_macro_parameter_list = 41
+ RULE_unary_operator = 42
+ RULE_primary_expression = 43
+ RULE_constant = 44
+ RULE_expression = 45
+ RULE_constant_expression = 46
+ RULE_assignment_expression = 47
+ RULE_lvalue = 48
+ RULE_assignment_operator = 49
+ RULE_conditional_expression = 50
+ RULE_logical_or_expression = 51
+ RULE_logical_and_expression = 52
+ RULE_inclusive_or_expression = 53
+ RULE_exclusive_or_expression = 54
+ RULE_and_expression = 55
+ RULE_equality_expression = 56
+ RULE_relational_expression = 57
+ RULE_shift_expression = 58
+ RULE_statement = 59
+ RULE_asm2_statement = 60
+ RULE_asm1_statement = 61
+ RULE_asm_statement = 62
+ RULE_macro_statement = 63
+ RULE_labeled_statement = 64
+ RULE_compound_statement = 65
+ RULE_statement_list = 66
+ RULE_expression_statement = 67
+ RULE_selection_statement = 68
+ RULE_iteration_statement = 69
+ RULE_jump_statement = 70
+
+ ruleNames = [ "translation_unit", "external_declaration", "function_definition",
+ "declaration_specifiers", "declaration", "init_declarator_list",
+ "init_declarator", "storage_class_specifier", "type_specifier",
+ "type_id", "struct_or_union_specifier", "struct_or_union",
+ "struct_declaration_list", "struct_declaration", "specifier_qualifier_list",
+ "struct_declarator_list", "struct_declarator", "enum_specifier",
+ "enumerator_list", "enumerator", "type_qualifier", "declarator",
+ "direct_declarator", "declarator_suffix", "pointer",
+ "parameter_type_list", "parameter_list", "parameter_declaration",
+ "identifier_list", "type_name", "abstract_declarator",
+ "direct_abstract_declarator", "abstract_declarator_suffix",
+ "initializer", "initializer_list", "argument_expression_list",
+ "additive_expression", "multiplicative_expression", "cast_expression",
+ "unary_expression", "postfix_expression", "macro_parameter_list",
+ "unary_operator", "primary_expression", "constant", "expression",
+ "constant_expression", "assignment_expression", "lvalue",
+ "assignment_operator", "conditional_expression", "logical_or_expression",
+ "logical_and_expression", "inclusive_or_expression",
+ "exclusive_or_expression", "and_expression", "equality_expression",
+ "relational_expression", "shift_expression", "statement",
+ "asm2_statement", "asm1_statement", "asm_statement",
+ "macro_statement", "labeled_statement", "compound_statement",
+ "statement_list", "expression_statement", "selection_statement",
+ "iteration_statement", "jump_statement" ]
+
+ EOF = Token.EOF
+ T__0=1
+ T__1=2
+ T__2=3
+ T__3=4
+ T__4=5
+ T__5=6
+ T__6=7
+ T__7=8
+ T__8=9
+ T__9=10
+ T__10=11
+ T__11=12
+ T__12=13
+ T__13=14
+ T__14=15
+ T__15=16
+ T__16=17
+ T__17=18
+ T__18=19
+ T__19=20
+ T__20=21
+ T__21=22
+ T__22=23
+ T__23=24
+ T__24=25
+ T__25=26
+ T__26=27
+ T__27=28
+ T__28=29
+ T__29=30
+ T__30=31
+ T__31=32
+ T__32=33
+ T__33=34
+ T__34=35
+ T__35=36
+ T__36=37
+ T__37=38
+ T__38=39
+ T__39=40
+ T__40=41
+ T__41=42
+ T__42=43
+ T__43=44
+ T__44=45
+ T__45=46
+ T__46=47
+ T__47=48
+ T__48=49
+ T__49=50
+ T__50=51
+ T__51=52
+ T__52=53
+ T__53=54
+ T__54=55
+ T__55=56
+ T__56=57
+ T__57=58
+ T__58=59
+ T__59=60
+ T__60=61
+ T__61=62
+ T__62=63
+ T__63=64
+ T__64=65
+ T__65=66
+ T__66=67
+ T__67=68
+ T__68=69
+ T__69=70
+ T__70=71
+ T__71=72
+ T__72=73
+ T__73=74
+ T__74=75
+ T__75=76
+ T__76=77
+ T__77=78
+ T__78=79
+ T__79=80
+ T__80=81
+ T__81=82
+ T__82=83
+ T__83=84
+ T__84=85
+ T__85=86
+ T__86=87
+ T__87=88
+ T__88=89
+ T__89=90
+ T__90=91
+ T__91=92
+ IDENTIFIER=93
+ CHARACTER_LITERAL=94
+ STRING_LITERAL=95
+ HEX_LITERAL=96
+ DECIMAL_LITERAL=97
+ OCTAL_LITERAL=98
+ FLOATING_POINT_LITERAL=99
+ WS=100
+ BS=101
+ UnicodeVocabulary=102
+ COMMENT=103
+ LINE_COMMENT=104
+ LINE_COMMAND=105
+
+ # @param input Type: TokenStream
+ # @param output=sys.stdout Type: TextIO
+ def __init__(self,input,output=sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.1")
+ self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
+ self._predicates = None
+
+
+
+
+ def printTokenInfo(self,line,offset,tokenText):
+ print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
+
+ def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+ def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+ def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+ def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+ def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+ def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+ def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
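+
+ # NOTE (illustrative, not part of the ANTLR-generated output): the Store*
+ # helpers above are called from actions embedded in the C.g4 grammar while
+ # a source file is parsed; each records one CodeFragment object, tagged
+ # with its (line, offset) start/end positions, into the module-level lists
+ # in Ecc.FileProfile. After a parse, Ecc's checkers walk those lists, e.g.:
+ #
+ #   for FuncDef in FileProfile.FunctionDefinitionList:
+ #       ...  # field layout is defined by CodeFragment.FunctionDefinition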
+
+
+
+ class Translation_unitContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def external_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.External_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.External_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_translation_unit
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterTranslation_unit" ):
+ listener.enterTranslation_unit(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitTranslation_unit" ):
+ listener.exitTranslation_unit(self)
+
+
+
+
+ def translation_unit(self):
+
+ localctx = CParser.Translation_unitContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 0, self.RULE_translation_unit)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 145
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41))) != 0) or _la==CParser.IDENTIFIER:
+ self.state = 142
+ self.external_declaration()
+ self.state = 147
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
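+
+ # NOTE (illustrative, not part of the ANTLR-generated output):
+ # translation_unit is the grammar's start rule. A minimal driver sketch,
+ # assuming the companion CLexer generated from the same C.g4 grammar sits
+ # next to this module:
+ #
+ #   from antlr4 import FileStream, CommonTokenStream
+ #   from Ecc.CParser4.CLexer import CLexer
+ #   from Ecc.CParser4.CParser import CParser
+ #
+ #   tokens = CommonTokenStream(CLexer(FileStream('Test.c')))
+ #   tree = CParser(tokens).translation_unit()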
+
+ class External_declarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def function_definition(self):
+ return self.getTypedRuleContext(CParser.Function_definitionContext,0)
+
+
+ def macro_statement(self):
+ return self.getTypedRuleContext(CParser.Macro_statementContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_external_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExternal_declaration" ):
+ listener.enterExternal_declaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExternal_declaration" ):
+ listener.exitExternal_declaration(self)
+
+
+
+
+ def external_declaration(self):
+
+ localctx = CParser.External_declarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 2, self.RULE_external_declaration)
+ self._la = 0 # Token type
+ try:
+ self.state = 166
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 149
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
+ if la_ == 1:
+ self.state = 148
+ self.declaration_specifiers()
+
+
+ self.state = 151
+ self.declarator()
+ self.state = 155
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER:
+ self.state = 152
+ self.declaration()
+ self.state = 157
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 158
+ self.match(CParser.T__0)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 160
+ self.function_definition()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 161
+ self.declaration()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 162
+ self.macro_statement()
+ self.state = 164
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__1:
+ self.state = 163
+ self.match(CParser.T__1)
+
+
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Function_definitionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.ModifierText = ''
+ self.DeclText = ''
+ self.LBLine = 0
+ self.LBOffset = 0
+ self.DeclLine = 0
+ self.DeclOffset = 0
+ self.d = None # Declaration_specifiersContext
+ self._declaration_specifiers = None # Declaration_specifiersContext
+ self._declarator = None # DeclaratorContext
+ self.a = None # Compound_statementContext
+ self.b = None # Compound_statementContext
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def compound_statement(self):
+ return self.getTypedRuleContext(CParser.Compound_statementContext,0)
+
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_function_definition
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterFunction_definition" ):
+ listener.enterFunction_definition(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitFunction_definition" ):
+ listener.exitFunction_definition(self)
+
+
+
+
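+ # Parse a function definition. The locals below collect the modifier and
+ # declarator text plus the line/column of the declarator and of the opening
+ # compound statement; they are passed to StoreFunctionDefinition() once the
+ # whole rule has matched.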
+ def function_definition(self):
+
+ localctx = CParser.Function_definitionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 4, self.RULE_function_definition)
+
+ ModifierText = ''
+ DeclText = ''
+ LBLine = 0
+ LBOffset = 0
+ DeclLine = 0
+ DeclOffset = 0
+
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 169
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,5,self._ctx)
+ if la_ == 1:
+ self.state = 168
+ localctx.d = localctx._declaration_specifiers = self.declaration_specifiers()
+
+
+ self.state = 171
+ localctx._declarator = self.declarator()
+ self.state = 180
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__2, CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
+ self.state = 173
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while True:
+ self.state = 172
+ self.declaration()
+ self.state = 175
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
+ break
+
+ self.state = 177
+ localctx.a = self.compound_statement()
+ pass
+ elif token in [CParser.T__0]:
+ self.state = 179
+ localctx.b = self.compound_statement()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+
+ if localctx.d is not None:
+ ModifierText = (None if localctx._declaration_specifiers is None else self._input.getText((localctx._declaration_specifiers.start,localctx._declaration_specifiers.stop)))
+ else:
+ ModifierText = ''
+ DeclText = (None if localctx._declarator is None else self._input.getText((localctx._declarator.start,localctx._declarator.stop)))
+ DeclLine = (None if localctx._declarator is None else localctx._declarator.start).line
+ DeclOffset = (None if localctx._declarator is None else localctx._declarator.start).column
+ if localctx.a is not None:
+ LBLine = (None if localctx.a is None else localctx.a.start).line
+ LBOffset = (None if localctx.a is None else localctx.a.start).column
+ else:
+ LBLine = (None if localctx.b is None else localctx.b.start).line
+ LBOffset = (None if localctx.b is None else localctx.b.start).column
+
+ self._ctx.stop = self._input.LT(-1)
+
+ self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Declaration_specifiersContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def storage_class_specifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Storage_class_specifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Storage_class_specifierContext,i)
+
+
+ # @param i=None Type: int
+ def type_specifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_specifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_specifierContext,i)
+
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declaration_specifiers
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclaration_specifiers" ):
+ listener.enterDeclaration_specifiers(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclaration_specifiers" ):
+ listener.exitDeclaration_specifiers(self)
+
+
+
+
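+ # One or more specifiers in any order: storage-class specifiers, type
+ # specifiers, or type qualifiers. The do/while-style loop matches at least
+ # one and stops when adaptivePredict() no longer predicts another.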
+ def declaration_specifiers(self):
+
+ localctx = CParser.Declaration_specifiersContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 6, self.RULE_declaration_specifiers)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 187
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 187
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9]:
+ self.state = 184
+ self.storage_class_specifier()
+ pass
+ elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
+ self.state = 185
+ self.type_specifier()
+ pass
+ elif token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
+ self.state = 186
+ self.type_qualifier()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 189
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class DeclarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.a = None # Token
+ self.b = None # Declaration_specifiersContext
+ self.c = None # Init_declarator_listContext
+ self.d = None # Token
+ self.s = None # Declaration_specifiersContext
+ self.t = None # Init_declarator_listContext
+ self.e = None # Token
+
+ def init_declarator_list(self):
+ return self.getTypedRuleContext(CParser.Init_declarator_listContext,0)
+
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclaration" ):
+ listener.enterDeclaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclaration" ):
+ listener.exitDeclaration(self)
+
+
+
+
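+ # Two alternatives: a 'typedef' declaration ending in ';', reported through
+ # StoreTypedefDefinition(), or an ordinary declaration whose specifier and
+ # init-declarator spans are reported through StoreVariableDeclaration()
+ # when an init-declarator list is present.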
+ def declaration(self):
+
+ localctx = CParser.DeclarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 8, self.RULE_declaration)
+ self._la = 0 # Token type
+ try:
+ self.state = 206
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__2]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 191
+ localctx.a = self.match(CParser.T__2)
+ self.state = 193
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
+ if la_ == 1:
+ self.state = 192
+ localctx.b = self.declaration_specifiers()
+
+
+ self.state = 195
+ localctx.c = self.init_declarator_list()
+ self.state = 196
+ localctx.d = self.match(CParser.T__1)
+
+ if localctx.b is not None:
+ self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, (None if localctx.b is None else self._input.getText((localctx.b.start,localctx.b.stop))), (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
+ else:
+ self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, '', (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
+
+ pass
+ elif token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 199
+ localctx.s = self.declaration_specifiers()
+ self.state = 201
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
+ self.state = 200
+ localctx.t = self.init_declarator_list()
+
+
+ self.state = 203
+ localctx.e = self.match(CParser.T__1)
+
+ if localctx.t is not None:
+ self.StoreVariableDeclaration((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.t is None else localctx.t.start).line, (None if localctx.t is None else localctx.t.start).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))), (None if localctx.t is None else self._input.getText((localctx.t.start,localctx.t.stop))))
+
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Init_declarator_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def init_declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Init_declaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.Init_declaratorContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_init_declarator_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInit_declarator_list" ):
+ listener.enterInit_declarator_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInit_declarator_list" ):
+ listener.exitInit_declarator_list(self)
+
+
+
+
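+ # A comma-separated list: init_declarator (',' init_declarator)*.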
+ def init_declarator_list(self):
+
+ localctx = CParser.Init_declarator_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 10, self.RULE_init_declarator_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 208
+ self.init_declarator()
+ self.state = 213
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 209
+ self.match(CParser.T__3)
+ self.state = 210
+ self.init_declarator()
+ self.state = 215
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Init_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def initializer(self):
+ return self.getTypedRuleContext(CParser.InitializerContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_init_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInit_declarator" ):
+ listener.enterInit_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInit_declarator" ):
+ listener.exitInit_declarator(self)
+
+
+
+
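+ # A declarator, optionally followed by '=' and an initializer.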
+ def init_declarator(self):
+
+ localctx = CParser.Init_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 12, self.RULE_init_declarator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 216
+ self.declarator()
+ self.state = 219
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__4:
+ self.state = 217
+ self.match(CParser.T__4)
+ self.state = 218
+ self.initializer()
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Storage_class_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_storage_class_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStorage_class_specifier" ):
+ listener.enterStorage_class_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStorage_class_specifier" ):
+ listener.exitStorage_class_specifier(self)
+
+
+
+
+ def storage_class_specifier(self):
+
+ localctx = CParser.Storage_class_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 14, self.RULE_storage_class_specifier)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 221
+ _la = self._input.LA(1)
+ if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.s = None # Struct_or_union_specifierContext
+ self.e = None # Enum_specifierContext
+
+ def struct_or_union_specifier(self):
+ return self.getTypedRuleContext(CParser.Struct_or_union_specifierContext,0)
+
+
+ def enum_specifier(self):
+ return self.getTypedRuleContext(CParser.Enum_specifierContext,0)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ def type_id(self):
+ return self.getTypedRuleContext(CParser.Type_idContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_specifier" ):
+ listener.enterType_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_specifier" ):
+ listener.exitType_specifier(self)
+
+
+
+
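+ # Thirteen alternatives: the built-in type keywords, a struct/union
+ # specifier (reported through StoreStructUnionDefinition), an enum
+ # specifier (reported through StoreEnumerationDefinition), an IDENTIFIER
+ # followed by type qualifiers and a declarator, or a bare type_id.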
+ def type_specifier(self):
+
+ localctx = CParser.Type_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 16, self.RULE_type_specifier)
+ try:
+ self.state = 247
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 223
+ self.match(CParser.T__10)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 224
+ self.match(CParser.T__11)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 225
+ self.match(CParser.T__12)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 226
+ self.match(CParser.T__13)
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 227
+ self.match(CParser.T__14)
+ pass
+
+ elif la_ == 6:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 228
+ self.match(CParser.T__15)
+ pass
+
+ elif la_ == 7:
+ self.enterOuterAlt(localctx, 7)
+ self.state = 229
+ self.match(CParser.T__16)
+ pass
+
+ elif la_ == 8:
+ self.enterOuterAlt(localctx, 8)
+ self.state = 230
+ self.match(CParser.T__17)
+ pass
+
+ elif la_ == 9:
+ self.enterOuterAlt(localctx, 9)
+ self.state = 231
+ self.match(CParser.T__18)
+ pass
+
+ elif la_ == 10:
+ self.enterOuterAlt(localctx, 10)
+ self.state = 232
+ localctx.s = self.struct_or_union_specifier()
+
+ if localctx.s.stop is not None:
+ self.StoreStructUnionDefinition((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.s is None else localctx.s.stop).line, (None if localctx.s is None else localctx.s.stop).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))))
+
+ pass
+
+ elif la_ == 11:
+ self.enterOuterAlt(localctx, 11)
+ self.state = 235
+ localctx.e = self.enum_specifier()
+
+ if localctx.e.stop is not None:
+ self.StoreEnumerationDefinition((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+
+ pass
+
+ elif la_ == 12:
+ self.enterOuterAlt(localctx, 12)
+ self.state = 238
+ self.match(CParser.IDENTIFIER)
+ self.state = 242
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 239
+ self.type_qualifier()
+ self.state = 244
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
+
+ self.state = 245
+ self.declarator()
+ pass
+
+ elif la_ == 13:
+ self.enterOuterAlt(localctx, 13)
+ self.state = 246
+ self.type_id()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_idContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_id
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_id" ):
+ listener.enterType_id(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_id" ):
+ listener.exitType_id(self)
+
+
+
+
+ def type_id(self):
+
+ localctx = CParser.Type_idContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 18, self.RULE_type_id)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 249
+ self.match(CParser.IDENTIFIER)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_or_union_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def struct_or_union(self):
+ return self.getTypedRuleContext(CParser.Struct_or_unionContext,0)
+
+
+ def struct_declaration_list(self):
+ return self.getTypedRuleContext(CParser.Struct_declaration_listContext,0)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_or_union_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_or_union_specifier" ):
+ listener.enterStruct_or_union_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_or_union_specifier" ):
+ listener.exitStruct_or_union_specifier(self)
+
+
+
+
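+ # Either a full definition, struct_or_union IDENTIFIER? '{'
+ # struct_declaration_list '}', or a bare reference, struct_or_union
+ # IDENTIFIER.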
+ def struct_or_union_specifier(self):
+
+ localctx = CParser.Struct_or_union_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 20, self.RULE_struct_or_union_specifier)
+ self._la = 0 # Token type
+ try:
+ self.state = 262
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 251
+ self.struct_or_union()
+ self.state = 253
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.IDENTIFIER:
+ self.state = 252
+ self.match(CParser.IDENTIFIER)
+
+
+ self.state = 255
+ self.match(CParser.T__0)
+ self.state = 256
+ self.struct_declaration_list()
+ self.state = 257
+ self.match(CParser.T__19)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 259
+ self.struct_or_union()
+ self.state = 260
+ self.match(CParser.IDENTIFIER)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_or_unionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_or_union
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_or_union" ):
+ listener.enterStruct_or_union(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_or_union" ):
+ listener.exitStruct_or_union(self)
+
+
+
+
+ def struct_or_union(self):
+
+ localctx = CParser.Struct_or_unionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 22, self.RULE_struct_or_union)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 264
+ _la = self._input.LA(1)
+ if not(_la==CParser.T__20 or _la==CParser.T__21):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declaration_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def struct_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Struct_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.Struct_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declaration_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declaration_list" ):
+ listener.enterStruct_declaration_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declaration_list" ):
+ listener.exitStruct_declaration_list(self)
+
+
+
+
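+ # One or more struct declarations; the body runs once before the
+ # lookahead test, so an empty member list is a syntax error.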
+ def struct_declaration_list(self):
+
+ localctx = CParser.Struct_declaration_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 24, self.RULE_struct_declaration_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 267
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while True:
+ self.state = 266
+ self.struct_declaration()
+ self.state = 269
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
+ break
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def specifier_qualifier_list(self):
+ return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)
+
+
+ def struct_declarator_list(self):
+ return self.getTypedRuleContext(CParser.Struct_declarator_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declaration" ):
+ listener.enterStruct_declaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declaration" ):
+ listener.exitStruct_declaration(self)
+
+
+
+
+ def struct_declaration(self):
+
+ localctx = CParser.Struct_declarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 26, self.RULE_struct_declaration)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 271
+ self.specifier_qualifier_list()
+ self.state = 272
+ self.struct_declarator_list()
+ self.state = 273
+ self.match(CParser.T__1)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Specifier_qualifier_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ # @param i=None Type: int
+ def type_specifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_specifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_specifierContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_specifier_qualifier_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterSpecifier_qualifier_list" ):
+ listener.enterSpecifier_qualifier_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitSpecifier_qualifier_list" ):
+ listener.exitSpecifier_qualifier_list(self)
+
+
+
+
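+ # One or more type qualifiers or type specifiers, in any order.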
+ def specifier_qualifier_list(self):
+
+ localctx = CParser.Specifier_qualifier_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 28, self.RULE_specifier_qualifier_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 277
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 277
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
+ self.state = 275
+ self.type_qualifier()
+ pass
+ elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
+ self.state = 276
+ self.type_specifier()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 279
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,21,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declarator_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def struct_declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Struct_declaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.Struct_declaratorContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declarator_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declarator_list" ):
+ listener.enterStruct_declarator_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declarator_list" ):
+ listener.exitStruct_declarator_list(self)
+
+
+
+
+ def struct_declarator_list(self):
+
+ localctx = CParser.Struct_declarator_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 30, self.RULE_struct_declarator_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 281
+ self.struct_declarator()
+ self.state = 286
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 282
+ self.match(CParser.T__3)
+ self.state = 283
+ self.struct_declarator()
+ self.state = 288
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declarator" ):
+ listener.enterStruct_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declarator" ):
+ listener.exitStruct_declarator(self)
+
+
+
+
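+ # A declarator with an optional bit-field width after the colon token,
+ # or a width-only (unnamed) bit-field declarator.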
+ def struct_declarator(self):
+
+ localctx = CParser.Struct_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 32, self.RULE_struct_declarator)
+ self._la = 0 # Token type
+ try:
+ self.state = 296
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__37, CParser.T__41, CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 289
+ self.declarator()
+ self.state = 292
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__22:
+ self.state = 290
+ self.match(CParser.T__22)
+ self.state = 291
+ self.constant_expression()
+
+
+ pass
+ elif token in [CParser.T__22]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 294
+ self.match(CParser.T__22)
+ self.state = 295
+ self.constant_expression()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Enum_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def enumerator_list(self):
+ return self.getTypedRuleContext(CParser.Enumerator_listContext,0)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_enum_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEnum_specifier" ):
+ listener.enterEnum_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEnum_specifier" ):
+ listener.exitEnum_specifier(self)
+
+
+
+
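+ # Three forms: an anonymous 'enum { ... }', a named 'enum NAME { ... }'
+ # (both tolerate a trailing comma before the closing brace), or a bare
+ # 'enum NAME' reference.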
+ def enum_specifier(self):
+
+ localctx = CParser.Enum_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 34, self.RULE_enum_specifier)
+ self._la = 0 # Token type
+ try:
+ self.state = 317
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,27,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 298
+ self.match(CParser.T__23)
+ self.state = 299
+ self.match(CParser.T__0)
+ self.state = 300
+ self.enumerator_list()
+ self.state = 302
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 301
+ self.match(CParser.T__3)
+
+
+ self.state = 304
+ self.match(CParser.T__19)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 306
+ self.match(CParser.T__23)
+ self.state = 307
+ self.match(CParser.IDENTIFIER)
+ self.state = 308
+ self.match(CParser.T__0)
+ self.state = 309
+ self.enumerator_list()
+ self.state = 311
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 310
+ self.match(CParser.T__3)
+
+
+ self.state = 313
+ self.match(CParser.T__19)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 315
+ self.match(CParser.T__23)
+ self.state = 316
+ self.match(CParser.IDENTIFIER)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Enumerator_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def enumerator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.EnumeratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.EnumeratorContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_enumerator_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEnumerator_list" ):
+ listener.enterEnumerator_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEnumerator_list" ):
+ listener.exitEnumerator_list(self)
+
+
+
+
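+ # A comma-separated list: enumerator (',' enumerator)*.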
+ def enumerator_list(self):
+
+ localctx = CParser.Enumerator_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 36, self.RULE_enumerator_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 319
+ self.enumerator()
+ self.state = 324
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 320
+ self.match(CParser.T__3)
+ self.state = 321
+ self.enumerator()
+ self.state = 326
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class EnumeratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_enumerator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEnumerator" ):
+ listener.enterEnumerator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEnumerator" ):
+ listener.exitEnumerator(self)
+
+
+
+
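+ # IDENTIFIER, optionally followed by '=' and a constant expression.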
+ def enumerator(self):
+
+ localctx = CParser.EnumeratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 38, self.RULE_enumerator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 327
+ self.match(CParser.IDENTIFIER)
+ self.state = 330
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__4:
+ self.state = 328
+ self.match(CParser.T__4)
+ self.state = 329
+ self.constant_expression()
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_qualifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_qualifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_qualifier" ):
+ listener.enterType_qualifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_qualifier" ):
+ listener.exitType_qualifier(self)
+
+
+
+
+ def type_qualifier(self):
+
+ localctx = CParser.Type_qualifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 40, self.RULE_type_qualifier)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 332
+ _la = self._input.LA(1)
+ if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class DeclaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def direct_declarator(self):
+ return self.getTypedRuleContext(CParser.Direct_declaratorContext,0)
+
+
+ def pointer(self):
+ return self.getTypedRuleContext(CParser.PointerContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclarator" ):
+ listener.enterDeclarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclarator" ):
+ listener.exitDeclarator(self)
+
+
+
+
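+ # Either an optional pointer plus up to three optional keyword tokens
+ # (T__33..T__35, calling-convention style qualifiers) before a direct
+ # declarator, or a bare pointer.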
+ def declarator(self):
+
+ localctx = CParser.DeclaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 42, self.RULE_declarator)
+ self._la = 0 # Token type
+ try:
+ self.state = 348
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 335
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__41:
+ self.state = 334
+ self.pointer()
+
+
+ self.state = 338
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__33:
+ self.state = 337
+ self.match(CParser.T__33)
+
+
+ self.state = 341
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__34:
+ self.state = 340
+ self.match(CParser.T__34)
+
+
+ self.state = 344
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__35:
+ self.state = 343
+ self.match(CParser.T__35)
+
+
+ self.state = 346
+ self.direct_declarator()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 347
+ self.pointer()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Direct_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ # @param i=None Type: int
+ def declarator_suffix(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Declarator_suffixContext)
+ else:
+ return self.getTypedRuleContext(CParser.Declarator_suffixContext,i)
+
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_direct_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDirect_declarator" ):
+ listener.enterDirect_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDirect_declarator" ):
+ listener.exitDirect_declarator(self)
+
+
+
+
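+ # Either IDENTIFIER followed by any number of declarator suffixes, or a
+ # parenthesized declarator followed by at least one declarator suffix.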
+ def direct_declarator(self):
+
+ localctx = CParser.Direct_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 44, self.RULE_direct_declarator)
+ try:
+ self.state = 368
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 350
+ self.match(CParser.IDENTIFIER)
+ self.state = 354
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 351
+ self.declarator_suffix()
+ self.state = 356
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
+
+ pass
+ elif token in [CParser.T__37]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 357
+ self.match(CParser.T__37)
+ self.state = 359
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,36,self._ctx)
+ if la_ == 1:
+ self.state = 358
+ self.match(CParser.T__33)
+
+
+ self.state = 361
+ self.declarator()
+ self.state = 362
+ self.match(CParser.T__38)
+ self.state = 364
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 363
+ self.declarator_suffix()
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 366
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,37,self._ctx)
+
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Declarator_suffixContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def parameter_type_list(self):
+ return self.getTypedRuleContext(CParser.Parameter_type_listContext,0)
+
+
+ def identifier_list(self):
+ return self.getTypedRuleContext(CParser.Identifier_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declarator_suffix
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclarator_suffix" ):
+ listener.enterDeclarator_suffix(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclarator_suffix" ):
+ listener.exitDeclarator_suffix(self)
+
+
+
+
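+ # Five suffix forms: '[' constant_expression ']', '[' ']',
+ # '(' parameter_type_list ')', '(' identifier_list ')', or '(' ')'.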
+ def declarator_suffix(self):
+
+ localctx = CParser.Declarator_suffixContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 46, self.RULE_declarator_suffix)
+ try:
+ self.state = 386
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,39,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 370
+ self.match(CParser.T__39)
+ self.state = 371
+ self.constant_expression()
+ self.state = 372
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 374
+ self.match(CParser.T__39)
+ self.state = 375
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 376
+ self.match(CParser.T__37)
+ self.state = 377
+ self.parameter_type_list()
+ self.state = 378
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 380
+ self.match(CParser.T__37)
+ self.state = 381
+ self.identifier_list()
+ self.state = 382
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 384
+ self.match(CParser.T__37)
+ self.state = 385
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class PointerContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ def pointer(self):
+ return self.getTypedRuleContext(CParser.PointerContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_pointer
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterPointer" ):
+ listener.enterPointer(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitPointer" ):
+ listener.exitPointer(self)
+
+
+
+
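+ # '*' followed by one or more type qualifiers and an optional nested
+ # pointer, '*' followed by another pointer, or a lone '*'.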
+ def pointer(self):
+
+ localctx = CParser.PointerContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 48, self.RULE_pointer)
+ try:
+ self.state = 400
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 388
+ self.match(CParser.T__41)
+ self.state = 390
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 389
+ self.type_qualifier()
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 392
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,40,self._ctx)
+
+ self.state = 395
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
+ if la_ == 1:
+ self.state = 394
+ self.pointer()
+
+
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 397
+ self.match(CParser.T__41)
+ self.state = 398
+ self.pointer()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 399
+ self.match(CParser.T__41)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Parameter_type_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def parameter_list(self):
+ return self.getTypedRuleContext(CParser.Parameter_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_parameter_type_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterParameter_type_list" ):
+ listener.enterParameter_type_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitParameter_type_list" ):
+ listener.exitParameter_type_list(self)
+
+
+
+
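+ # parameter_list, optionally followed by ',' plus an optional qualifier
+ # keyword (T__28, presumably EDK2's OPTIONAL) and the varargs token.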
+ def parameter_type_list(self):
+
+ localctx = CParser.Parameter_type_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 50, self.RULE_parameter_type_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 402
+ self.parameter_list()
+ self.state = 408
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 403
+ self.match(CParser.T__3)
+ self.state = 405
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 404
+ self.match(CParser.T__28)
+
+
+ self.state = 407
+ self.match(CParser.T__42)
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Parameter_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def parameter_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_parameter_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterParameter_list" ):
+ listener.enterParameter_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitParameter_list" ):
+ listener.exitParameter_list(self)
+
+
+
+
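+ # parameter_declaration (',' qualifier? parameter_declaration)*, where the
+ # optional qualifier token (T__28) may precede each later parameter.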
+ def parameter_list(self):
+
+ localctx = CParser.Parameter_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 52, self.RULE_parameter_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 410
+ self.parameter_declaration()
+ self.state = 418
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 411
+ self.match(CParser.T__3)
+ self.state = 413
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,45,self._ctx)
+ if la_ == 1:
+ self.state = 412
+ self.match(CParser.T__28)
+
+
+ self.state = 415
+ self.parameter_declaration()
+ self.state = 420
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Parameter_declarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ # @param i=None Type: int
+ def declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclaratorContext,i)
+
+
+ # @param i=None Type: int
+ def abstract_declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Abstract_declaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.Abstract_declaratorContext,i)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ # @param i=None Type: int
+ def pointer(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.PointerContext)
+ else:
+ return self.getTypedRuleContext(CParser.PointerContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_parameter_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterParameter_declaration" ):
+ listener.enterParameter_declaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitParameter_declaration" ):
+ listener.exitParameter_declaration(self)
+
+
+
+
+ def parameter_declaration(self):
+
+ localctx = CParser.Parameter_declarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 54, self.RULE_parameter_declaration)
+ self._la = 0 # Token type
+ try:
+ self.state = 439
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,51,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 421
+ self.declaration_specifiers()
+ self.state = 426
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__39 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
+ self.state = 424
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,47,self._ctx)
+ if la_ == 1:
+ self.state = 422
+ self.declarator()
+ pass
+
+ elif la_ == 2:
+ self.state = 423
+ self.abstract_declarator()
+ pass
+
+
+ self.state = 428
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 430
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 429
+ self.match(CParser.T__28)
+
+
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 435
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__41:
+ self.state = 432
+ self.pointer()
+ self.state = 437
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 438
+ self.match(CParser.IDENTIFIER)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
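+ # Rule 'identifier_list' (rule index 56): IDENTIFIER (',' IDENTIFIER)*.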
+ class Identifier_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def IDENTIFIER(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.IDENTIFIER)
+ else:
+ return self.getToken(CParser.IDENTIFIER, i)
+
+ def getRuleIndex(self):
+ return CParser.RULE_identifier_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterIdentifier_list" ):
+ listener.enterIdentifier_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitIdentifier_list" ):
+ listener.exitIdentifier_list(self)
+
+
+
+
+ def identifier_list(self):
+
+ localctx = CParser.Identifier_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 56, self.RULE_identifier_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 441
+ self.match(CParser.IDENTIFIER)
+ self.state = 446
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 442
+ self.match(CParser.T__3)
+ self.state = 443
+ self.match(CParser.IDENTIFIER)
+ self.state = 448
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
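+ # Rule 'type_name' (rule index 58): a specifier_qualifier_list with an optional
+ # abstract_declarator, or a bare type_id.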
+ class Type_nameContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def specifier_qualifier_list(self):
+ return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)
+
+
+ def abstract_declarator(self):
+ return self.getTypedRuleContext(CParser.Abstract_declaratorContext,0)
+
+
+ def type_id(self):
+ return self.getTypedRuleContext(CParser.Type_idContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_name
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_name" ):
+ listener.enterType_name(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_name" ):
+ listener.exitType_name(self)
+
+
+
+
+ def type_name(self):
+
+ localctx = CParser.Type_nameContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 58, self.RULE_type_name)
+ self._la = 0 # Token type
+ try:
+ self.state = 454
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,54,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 449
+ self.specifier_qualifier_list()
+ self.state = 451
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__37) | (1 << CParser.T__39) | (1 << CParser.T__41))) != 0):
+ self.state = 450
+ self.abstract_declarator()
+
+
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 453
+ self.type_id()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
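+ # Rule 'abstract_declarator' (rule index 60): a pointer optionally followed by a
+ # direct_abstract_declarator, or a direct_abstract_declarator alone.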
+ class Abstract_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def pointer(self):
+ return self.getTypedRuleContext(CParser.PointerContext,0)
+
+
+ def direct_abstract_declarator(self):
+ return self.getTypedRuleContext(CParser.Direct_abstract_declaratorContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_abstract_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAbstract_declarator" ):
+ listener.enterAbstract_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAbstract_declarator" ):
+ listener.exitAbstract_declarator(self)
+
+
+
+
+ def abstract_declarator(self):
+
+ localctx = CParser.Abstract_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 60, self.RULE_abstract_declarator)
+ try:
+ self.state = 461
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__41]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 456
+ self.pointer()
+ self.state = 458
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,55,self._ctx)
+ if la_ == 1:
+ self.state = 457
+ self.direct_abstract_declarator()
+
+
+ pass
+ elif token in [CParser.T__37, CParser.T__39]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 460
+ self.direct_abstract_declarator()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
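+ # Rule 'direct_abstract_declarator' (rule index 62): either a parenthesized
+ # abstract_declarator or a leading abstract_declarator_suffix, followed by
+ # zero or more further suffixes.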
+ class Direct_abstract_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def abstract_declarator(self):
+ return self.getTypedRuleContext(CParser.Abstract_declaratorContext,0)
+
+
+ # @param i=None Type: int
+ def abstract_declarator_suffix(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Abstract_declarator_suffixContext)
+ else:
+ return self.getTypedRuleContext(CParser.Abstract_declarator_suffixContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_direct_abstract_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDirect_abstract_declarator" ):
+ listener.enterDirect_abstract_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDirect_abstract_declarator" ):
+ listener.exitDirect_abstract_declarator(self)
+
+
+
+ def direct_abstract_declarator(self):
+
+ localctx = CParser.Direct_abstract_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 62, self.RULE_direct_abstract_declarator)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 468
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
+ if la_ == 1:
+ self.state = 463
+ self.match(CParser.T__37)
+ self.state = 464
+ self.abstract_declarator()
+ self.state = 465
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 2:
+ self.state = 467
+ self.abstract_declarator_suffix()
+ pass
+
+
+ self.state = 473
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 470
+ self.abstract_declarator_suffix()
+ self.state = 475
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
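+ # Rule 'abstract_declarator_suffix' (rule index 64): one of '[ ]',
+ # '[' constant_expression ']', '( )', or '(' parameter_type_list ')'
+ # (T__39/T__40 and T__37/T__38 are the bracket and parenthesis literals).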
+ class Abstract_declarator_suffixContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def parameter_type_list(self):
+ return self.getTypedRuleContext(CParser.Parameter_type_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_abstract_declarator_suffix
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAbstract_declarator_suffix" ):
+ listener.enterAbstract_declarator_suffix(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAbstract_declarator_suffix" ):
+ listener.exitAbstract_declarator_suffix(self)
+
+
+
+
+ def abstract_declarator_suffix(self):
+
+ localctx = CParser.Abstract_declarator_suffixContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 64, self.RULE_abstract_declarator_suffix)
+ try:
+ self.state = 488
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,59,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 476
+ self.match(CParser.T__39)
+ self.state = 477
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 478
+ self.match(CParser.T__39)
+ self.state = 479
+ self.constant_expression()
+ self.state = 480
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 482
+ self.match(CParser.T__37)
+ self.state = 483
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 484
+ self.match(CParser.T__37)
+ self.state = 485
+ self.parameter_type_list()
+ self.state = 486
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
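+ # Rule 'initializer' (rule index 66): an assignment_expression, or a braced
+ # initializer_list that tolerates one trailing comma before the closing brace.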
+ class InitializerContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def assignment_expression(self):
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)
+
+
+ def initializer_list(self):
+ return self.getTypedRuleContext(CParser.Initializer_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_initializer
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInitializer" ):
+ listener.enterInitializer(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInitializer" ):
+ listener.exitInitializer(self)
+
+
+
+
+ def initializer(self):
+
+ localctx = CParser.InitializerContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 66, self.RULE_initializer)
+ self._la = 0 # Token type
+ try:
+ self.state = 498
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 490
+ self.assignment_expression()
+ pass
+ elif token in [CParser.T__0]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 491
+ self.match(CParser.T__0)
+ self.state = 492
+ self.initializer_list()
+ self.state = 494
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 493
+ self.match(CParser.T__3)
+
+
+ self.state = 496
+ self.match(CParser.T__19)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
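+ # Rule 'initializer_list' (rule index 68): initializer (',' initializer)*.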
+ class Initializer_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def initializer(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.InitializerContext)
+ else:
+ return self.getTypedRuleContext(CParser.InitializerContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_initializer_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInitializer_list" ):
+ listener.enterInitializer_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInitializer_list" ):
+ listener.exitInitializer_list(self)
+
+
+
+
+ def initializer_list(self):
+
+ localctx = CParser.Initializer_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 68, self.RULE_initializer_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 500
+ self.initializer()
+ self.state = 505
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 501
+ self.match(CParser.T__3)
+ self.state = 502
+ self.initializer()
+ self.state = 507
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
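+ # Rule 'argument_expression_list' (rule index 70): assignment_expression entries
+ # separated by commas, each optionally trailed by a T__28 marker.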
+ class Argument_expression_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def assignment_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_argument_expression_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterArgument_expression_list" ):
+ listener.enterArgument_expression_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitArgument_expression_list" ):
+ listener.exitArgument_expression_list(self)
+
+
+
+
+ def argument_expression_list(self):
+
+ localctx = CParser.Argument_expression_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 70, self.RULE_argument_expression_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 508
+ self.assignment_expression()
+ self.state = 510
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 509
+ self.match(CParser.T__28)
+
+
+ self.state = 519
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 512
+ self.match(CParser.T__3)
+ self.state = 513
+ self.assignment_expression()
+ self.state = 515
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 514
+ self.match(CParser.T__28)
+
+
+ self.state = 521
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
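+ # Rule 'additive_expression' (rule index 72): a left-associative chain of
+ # multiplicative_expression operands joined by '+'/'-' (T__43/T__44).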
+ class Additive_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def multiplicative_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Multiplicative_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Multiplicative_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_additive_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAdditive_expression" ):
+ listener.enterAdditive_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAdditive_expression" ):
+ listener.exitAdditive_expression(self)
+
+
+
+
+ def additive_expression(self):
+
+ localctx = CParser.Additive_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 72, self.RULE_additive_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 522
+ self.multiplicative_expression()
+ self.state = 529
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__43 or _la==CParser.T__44:
+ self.state = 527
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__43]:
+ self.state = 523
+ self.match(CParser.T__43)
+ self.state = 524
+ self.multiplicative_expression()
+ pass
+ elif token in [CParser.T__44]:
+ self.state = 525
+ self.match(CParser.T__44)
+ self.state = 526
+ self.multiplicative_expression()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ self.state = 531
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
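+ # Rule 'multiplicative_expression' (rule index 74): a left-associative chain of
+ # cast_expression operands joined by '*'/'/'/'%' (T__41/T__45/T__46; T__41 is
+ # the same token used for the pointer star).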
+ class Multiplicative_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def cast_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Cast_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Cast_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_multiplicative_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterMultiplicative_expression" ):
+ listener.enterMultiplicative_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitMultiplicative_expression" ):
+ listener.exitMultiplicative_expression(self)
+
+
+
+
+ def multiplicative_expression(self):
+
+ localctx = CParser.Multiplicative_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 74, self.RULE_multiplicative_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 532
+ self.cast_expression()
+ self.state = 541
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__45) | (1 << CParser.T__46))) != 0):
+ self.state = 539
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__41]:
+ self.state = 533
+ self.match(CParser.T__41)
+ self.state = 534
+ self.cast_expression()
+ pass
+ elif token in [CParser.T__45]:
+ self.state = 535
+ self.match(CParser.T__45)
+ self.state = 536
+ self.cast_expression()
+ pass
+ elif token in [CParser.T__46]:
+ self.state = 537
+ self.match(CParser.T__46)
+ self.state = 538
+ self.cast_expression()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ self.state = 543
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
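+ # Rule 'cast_expression' (rule index 76): '(' type_name ')' cast_expression, or a
+ # plain unary_expression; adaptivePredict picks the alternative.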
+ class Cast_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def type_name(self):
+ return self.getTypedRuleContext(CParser.Type_nameContext,0)
+
+
+ def cast_expression(self):
+ return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
+
+
+ def unary_expression(self):
+ return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_cast_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterCast_expression" ):
+ listener.enterCast_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitCast_expression" ):
+ listener.exitCast_expression(self)
+
+
+
+
+ def cast_expression(self):
+
+ localctx = CParser.Cast_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 76, self.RULE_cast_expression)
+ try:
+ self.state = 550
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,70,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 544
+ self.match(CParser.T__37)
+ self.state = 545
+ self.type_name()
+ self.state = 546
+ self.match(CParser.T__38)
+ self.state = 547
+ self.cast_expression()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 549
+ self.unary_expression()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
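+ # Rule 'unary_expression' (rule index 78): a postfix_expression, a prefix
+ # T__47/T__48 ('++'/'--') form, unary_operator cast_expression, or a T__49
+ # prefix (presumably 'sizeof') applied to a unary_expression or a
+ # parenthesized type_name.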
+ class Unary_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def postfix_expression(self):
+ return self.getTypedRuleContext(CParser.Postfix_expressionContext,0)
+
+
+ def unary_expression(self):
+ return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
+
+
+ def unary_operator(self):
+ return self.getTypedRuleContext(CParser.Unary_operatorContext,0)
+
+
+ def cast_expression(self):
+ return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
+
+
+ def type_name(self):
+ return self.getTypedRuleContext(CParser.Type_nameContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_unary_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterUnary_expression" ):
+ listener.enterUnary_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitUnary_expression" ):
+ listener.exitUnary_expression(self)
+
+
+
+
+ def unary_expression(self):
+
+ localctx = CParser.Unary_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 78, self.RULE_unary_expression)
+ try:
+ self.state = 567
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,71,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 552
+ self.postfix_expression()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 553
+ self.match(CParser.T__47)
+ self.state = 554
+ self.unary_expression()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 555
+ self.match(CParser.T__48)
+ self.state = 556
+ self.unary_expression()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 557
+ self.unary_operator()
+ self.state = 558
+ self.cast_expression()
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 560
+ self.match(CParser.T__49)
+ self.state = 561
+ self.unary_expression()
+ pass
+
+ elif la_ == 6:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 562
+ self.match(CParser.T__49)
+ self.state = 563
+ self.match(CParser.T__37)
+ self.state = 564
+ self.type_name()
+ self.state = 565
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
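+ # Rule 'postfix_expression' (rule index 80): a primary_expression followed by
+ # any number of postfix operators: '[' expression ']' indexing, three call
+ # forms (empty parentheses, argument_expression_list, macro_parameter_list),
+ # member access via '.' (T__50) and '->' (T__51), a '*' IDENTIFIER form that
+ # resets the accumulated callee text, and postfix T__47/T__48. The embedded
+ # actions build the callee text in FuncCallText and report each completed call
+ # through StoreFunctionCalling() with the callee's source coordinates and
+ # argument text; e.g. for a (hypothetical) input 'gBS->AllocatePool (...)' the
+ # '->' branch extends the callee text before the call branch records it.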
+ class Postfix_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.FuncCallText = ''
+ self.p = None # Primary_expressionContext
+ self.a = None # Token
+ self.c = None # Argument_expression_listContext
+ self.b = None # Token
+ self.x = None # Token
+ self.y = None # Token
+ self.z = None # Token
+
+ def primary_expression(self):
+ return self.getTypedRuleContext(CParser.Primary_expressionContext,0)
+
+
+ # @param i=None Type: int
+ def expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.ExpressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.ExpressionContext,i)
+
+
+ # @param i=None Type: int
+ def macro_parameter_list(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Macro_parameter_listContext)
+ else:
+ return self.getTypedRuleContext(CParser.Macro_parameter_listContext,i)
+
+
+ # @param i=None Type: int
+ def argument_expression_list(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Argument_expression_listContext)
+ else:
+ return self.getTypedRuleContext(CParser.Argument_expression_listContext,i)
+
+
+ # @param i=None Type: int
+ def IDENTIFIER(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.IDENTIFIER)
+ else:
+ return self.getToken(CParser.IDENTIFIER, i)
+
+ def getRuleIndex(self):
+ return CParser.RULE_postfix_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterPostfix_expression" ):
+ listener.enterPostfix_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitPostfix_expression" ):
+ listener.exitPostfix_expression(self)
+
+
+
+
+ def postfix_expression(self):
+
+ localctx = CParser.Postfix_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 80, self.RULE_postfix_expression)
+
+ self.FuncCallText = ''
+
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 569
+ localctx.p = self.primary_expression()
+ self.FuncCallText += (None if localctx.p is None else self._input.getText((localctx.p.start,localctx.p.stop)))
+ self.state = 600
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 598
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,72,self._ctx)
+ if la_ == 1:
+ self.state = 571
+ self.match(CParser.T__39)
+ self.state = 572
+ self.expression()
+ self.state = 573
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 2:
+ self.state = 575
+ self.match(CParser.T__37)
+ self.state = 576
+ localctx.a = self.match(CParser.T__38)
+ self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.a is None else localctx.a.line), (0 if localctx.a is None else localctx.a.column), self.FuncCallText, '')
+ pass
+
+ elif la_ == 3:
+ self.state = 578
+ self.match(CParser.T__37)
+ self.state = 579
+ localctx.c = self.argument_expression_list()
+ self.state = 580
+ localctx.b = self.match(CParser.T__38)
+ self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.b is None else localctx.b.line), (0 if localctx.b is None else localctx.b.column), self.FuncCallText, (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
+ pass
+
+ elif la_ == 4:
+ self.state = 583
+ self.match(CParser.T__37)
+ self.state = 584
+ self.macro_parameter_list()
+ self.state = 585
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 5:
+ self.state = 587
+ self.match(CParser.T__50)
+ self.state = 588
+ localctx.x = self.match(CParser.IDENTIFIER)
+ self.FuncCallText += '.' + (None if localctx.x is None else localctx.x.text)
+ pass
+
+ elif la_ == 6:
+ self.state = 590
+ self.match(CParser.T__41)
+ self.state = 591
+ localctx.y = self.match(CParser.IDENTIFIER)
+ self.FuncCallText = (None if localctx.y is None else localctx.y.text)
+ pass
+
+ elif la_ == 7:
+ self.state = 593
+ self.match(CParser.T__51)
+ self.state = 594
+ localctx.z = self.match(CParser.IDENTIFIER)
+ self.FuncCallText += '->' + (None if localctx.z is None else localctx.z.text)
+ pass
+
+ elif la_ == 8:
+ self.state = 596
+ self.match(CParser.T__47)
+ pass
+
+ elif la_ == 9:
+ self.state = 597
+ self.match(CParser.T__48)
+ pass
+
+
+ self.state = 602
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
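+ # Rule 'macro_parameter_list' (rule index 82): parameter_declaration
+ # (',' parameter_declaration)*.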
+ class Macro_parameter_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def parameter_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_macro_parameter_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterMacro_parameter_list" ):
+ listener.enterMacro_parameter_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitMacro_parameter_list" ):
+ listener.exitMacro_parameter_list(self)
+
+
+
+
+ def macro_parameter_list(self):
+
+ localctx = CParser.Macro_parameter_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 82, self.RULE_macro_parameter_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 603
+ self.parameter_declaration()
+ self.state = 608
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 604
+ self.match(CParser.T__3)
+ self.state = 605
+ self.parameter_declaration()
+ self.state = 610
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
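+ # Rule 'unary_operator' (rule index 84): a single token from the set
+ # {T__41, T__43, T__44, T__52, T__53, T__54}, i.e. the '*', '+', '-', '&',
+ # '~', '!' operator literals of the grammar.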
+ class Unary_operatorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_unary_operator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterUnary_operator" ):
+ listener.enterUnary_operator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitUnary_operator" ):
+ listener.exitUnary_operator(self)
+
+
+
+
+ def unary_operator(self):
+
+ localctx = CParser.Unary_operatorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 84, self.RULE_unary_operator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 611
+ _la = self._input.LA(1)
+ if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
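+ # Rule 'primary_expression' (rule index 86): an IDENTIFIER, a constant, or a
+ # parenthesized expression.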
+ class Primary_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def constant(self):
+ return self.getTypedRuleContext(CParser.ConstantContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_primary_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterPrimary_expression" ):
+ listener.enterPrimary_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitPrimary_expression" ):
+ listener.exitPrimary_expression(self)
+
+
+
+
+ def primary_expression(self):
+
+ localctx = CParser.Primary_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 86, self.RULE_primary_expression)
+ try:
+ self.state = 619
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,75,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 613
+ self.match(CParser.IDENTIFIER)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 614
+ self.constant()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 615
+ self.match(CParser.T__37)
+ self.state = 616
+ self.expression()
+ self.state = 617
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
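+ # Rule 'constant' (rule index 88): hex/octal/decimal/character literals, a
+ # floating-point literal, or a string-literal alternative that accepts runs of
+ # IDENTIFIER tokens around one or more STRING_LITERAL tokens (presumably to
+ # tolerate macro-prefixed string spellings in EDK2 sources).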
+ class ConstantContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def HEX_LITERAL(self):
+ return self.getToken(CParser.HEX_LITERAL, 0)
+
+ def OCTAL_LITERAL(self):
+ return self.getToken(CParser.OCTAL_LITERAL, 0)
+
+ def DECIMAL_LITERAL(self):
+ return self.getToken(CParser.DECIMAL_LITERAL, 0)
+
+ def CHARACTER_LITERAL(self):
+ return self.getToken(CParser.CHARACTER_LITERAL, 0)
+
+ # @param i=None Type: int
+ def IDENTIFIER(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.IDENTIFIER)
+ else:
+ return self.getToken(CParser.IDENTIFIER, i)
+
+ # @param i=None Type: int
+ def STRING_LITERAL(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.STRING_LITERAL)
+ else:
+ return self.getToken(CParser.STRING_LITERAL, i)
+
+ def FLOATING_POINT_LITERAL(self):
+ return self.getToken(CParser.FLOATING_POINT_LITERAL, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_constant
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterConstant" ):
+ listener.enterConstant(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitConstant" ):
+ listener.exitConstant(self)
+
+
+
+
+ def constant(self):
+
+ localctx = CParser.ConstantContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 88, self.RULE_constant)
+ self._la = 0 # Token type
+ try:
+ self.state = 647
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.HEX_LITERAL]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 621
+ self.match(CParser.HEX_LITERAL)
+ pass
+ elif token in [CParser.OCTAL_LITERAL]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 622
+ self.match(CParser.OCTAL_LITERAL)
+ pass
+ elif token in [CParser.DECIMAL_LITERAL]:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 623
+ self.match(CParser.DECIMAL_LITERAL)
+ pass
+ elif token in [CParser.CHARACTER_LITERAL]:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 624
+ self.match(CParser.CHARACTER_LITERAL)
+ pass
+ elif token in [CParser.IDENTIFIER, CParser.STRING_LITERAL]:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 636
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 628
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.IDENTIFIER:
+ self.state = 625
+ self.match(CParser.IDENTIFIER)
+ self.state = 630
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 632
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 631
+ self.match(CParser.STRING_LITERAL)
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 634
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,77,self._ctx)
+
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 638
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,78,self._ctx)
+
+ self.state = 643
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.IDENTIFIER:
+ self.state = 640
+ self.match(CParser.IDENTIFIER)
+ self.state = 645
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ pass
+ elif token in [CParser.FLOATING_POINT_LITERAL]:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 646
+ self.match(CParser.FLOATING_POINT_LITERAL)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
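+ # Rule 'expression' (rule index 90): assignment_expression
+ # (',' assignment_expression)*.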
+ class ExpressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def assignment_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExpression" ):
+ listener.enterExpression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExpression" ):
+ listener.exitExpression(self)
+
+
+
+
+ def expression(self):
+
+ localctx = CParser.ExpressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 90, self.RULE_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 649
+ self.assignment_expression()
+ self.state = 654
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 650
+ self.match(CParser.T__3)
+ self.state = 651
+ self.assignment_expression()
+ self.state = 656
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
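+ # Rule 'constant_expression' (rule index 92): a single conditional_expression.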
+ class Constant_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def conditional_expression(self):
+ return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_constant_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterConstant_expression" ):
+ listener.enterConstant_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitConstant_expression" ):
+ listener.exitConstant_expression(self)
+
+
+
+
+ def constant_expression(self):
+
+ localctx = CParser.Constant_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 92, self.RULE_constant_expression)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 657
+ self.conditional_expression()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
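+ # Rule 'assignment_expression' (rule index 94): lvalue assignment_operator
+ # assignment_expression, or a conditional_expression; adaptivePredict
+ # disambiguates the two.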
+ class Assignment_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def lvalue(self):
+ return self.getTypedRuleContext(CParser.LvalueContext,0)
+
+
+ def assignment_operator(self):
+ return self.getTypedRuleContext(CParser.Assignment_operatorContext,0)
+
+
+ def assignment_expression(self):
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)
+
+
+ def conditional_expression(self):
+ return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_assignment_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAssignment_expression" ):
+ listener.enterAssignment_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAssignment_expression" ):
+ listener.exitAssignment_expression(self)
+
+
+
+
+ def assignment_expression(self):
+
+ localctx = CParser.Assignment_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 94, self.RULE_assignment_expression)
+ try:
+ self.state = 664
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,82,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 659
+ self.lvalue()
+ self.state = 660
+ self.assignment_operator()
+ self.state = 661
+ self.assignment_expression()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 663
+ self.conditional_expression()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
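+ # Rule 'lvalue' (rule index 96): a single unary_expression.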
+ class LvalueContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def unary_expression(self):
+ return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_lvalue
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLvalue" ):
+ listener.enterLvalue(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLvalue" ):
+ listener.exitLvalue(self)
+
+
+
+
+ def lvalue(self):
+
+ localctx = CParser.LvalueContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 96, self.RULE_lvalue)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 666
+ self.unary_expression()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
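+ # Rule 'assignment_operator' (rule index 98): one of the eleven assignment
+ # operator tokens (T__4 and T__55..T__64, i.e. '=' and the compound
+ # assignment literals).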
+ class Assignment_operatorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_assignment_operator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAssignment_operator" ):
+ listener.enterAssignment_operator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAssignment_operator" ):
+ listener.exitAssignment_operator(self)
+
+
+
+
+ def assignment_operator(self):
+
+ localctx = CParser.Assignment_operatorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 98, self.RULE_assignment_operator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 668
+ _la = self._input.LA(1)
+ if not(((((_la - 5)) & ~0x3f) == 0 and ((1 << (_la - 5)) & ((1 << (CParser.T__4 - 5)) | (1 << (CParser.T__55 - 5)) | (1 << (CParser.T__56 - 5)) | (1 << (CParser.T__57 - 5)) | (1 << (CParser.T__58 - 5)) | (1 << (CParser.T__59 - 5)) | (1 << (CParser.T__60 - 5)) | (1 << (CParser.T__61 - 5)) | (1 << (CParser.T__62 - 5)) | (1 << (CParser.T__63 - 5)) | (1 << (CParser.T__64 - 5)))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
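+ # Rule 'conditional_expression' (rule index 100): logical_or_expression
+ # optionally followed by '?' expression ':' conditional_expression; when the
+ # ternary branch is taken, the embedded action records the condition's source
+ # range and text via StorePredicateExpression().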
+ class Conditional_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.e = None # Logical_or_expressionContext
+
+ def logical_or_expression(self):
+ return self.getTypedRuleContext(CParser.Logical_or_expressionContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def conditional_expression(self):
+ return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_conditional_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterConditional_expression" ):
+ listener.enterConditional_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitConditional_expression" ):
+ listener.exitConditional_expression(self)
+
+
+
+
+ def conditional_expression(self):
+
+ localctx = CParser.Conditional_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 100, self.RULE_conditional_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 670
+ localctx.e = self.logical_or_expression()
+ self.state = 677
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__65:
+ self.state = 671
+ self.match(CParser.T__65)
+ self.state = 672
+ self.expression()
+ self.state = 673
+ self.match(CParser.T__22)
+ self.state = 674
+ self.conditional_expression()
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
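+ # Rule 'logical_or_expression' (rule index 102): a left-associative chain of
+ # logical_and_expression operands joined by T__66 ('||').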
+ class Logical_or_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def logical_and_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Logical_and_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Logical_and_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_logical_or_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLogical_or_expression" ):
+ listener.enterLogical_or_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLogical_or_expression" ):
+ listener.exitLogical_or_expression(self)
+
+
+
+
+ def logical_or_expression(self):
+
+ localctx = CParser.Logical_or_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 102, self.RULE_logical_or_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 679
+ self.logical_and_expression()
+ self.state = 684
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__66:
+ self.state = 680
+ self.match(CParser.T__66)
+ self.state = 681
+ self.logical_and_expression()
+ self.state = 686
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
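+ # Rule 'logical_and_expression' (rule index 104): a left-associative chain of
+ # inclusive_or_expression operands joined by T__67 ('&&').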
+ class Logical_and_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def inclusive_or_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Inclusive_or_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Inclusive_or_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_logical_and_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLogical_and_expression" ):
+ listener.enterLogical_and_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLogical_and_expression" ):
+ listener.exitLogical_and_expression(self)
+
+
+
+
+ def logical_and_expression(self):
+
+ localctx = CParser.Logical_and_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 104, self.RULE_logical_and_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 687
+ self.inclusive_or_expression()
+ self.state = 692
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__67:
+ self.state = 688
+ self.match(CParser.T__67)
+ self.state = 689
+ self.inclusive_or_expression()
+ self.state = 694
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
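+ # Rule 'inclusive_or_expression' (rule index 106): a left-associative chain of
+ # exclusive_or_expression operands joined by T__68 ('|').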
+ class Inclusive_or_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def exclusive_or_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Exclusive_or_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Exclusive_or_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_inclusive_or_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInclusive_or_expression" ):
+ listener.enterInclusive_or_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInclusive_or_expression" ):
+ listener.exitInclusive_or_expression(self)
+
+
+
+
+ def inclusive_or_expression(self):
+
+ localctx = CParser.Inclusive_or_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 106, self.RULE_inclusive_or_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 695
+ self.exclusive_or_expression()
+ self.state = 700
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__68:
+ self.state = 696
+ self.match(CParser.T__68)
+ self.state = 697
+ self.exclusive_or_expression()
+ self.state = 702
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
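+ # Rule 'exclusive_or_expression' (rule index 108): a left-associative chain of
+ # and_expression operands joined by T__69 ('^').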
+ class Exclusive_or_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def and_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.And_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.And_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_exclusive_or_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExclusive_or_expression" ):
+ listener.enterExclusive_or_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExclusive_or_expression" ):
+ listener.exitExclusive_or_expression(self)
+
+
+
+
+ def exclusive_or_expression(self):
+
+ localctx = CParser.Exclusive_or_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 108, self.RULE_exclusive_or_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 703
+ self.and_expression()
+ self.state = 708
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__69:
+ self.state = 704
+ self.match(CParser.T__69)
+ self.state = 705
+ self.and_expression()
+ self.state = 710
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class And_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def equality_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Equality_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Equality_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_and_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAnd_expression" ):
+ listener.enterAnd_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAnd_expression" ):
+ listener.exitAnd_expression(self)
+
+
+
+
+ def and_expression(self):
+
+ localctx = CParser.And_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 110, self.RULE_and_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 711
+ self.equality_expression()
+ self.state = 716
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__52:
+ self.state = 712
+ self.match(CParser.T__52)
+ self.state = 713
+ self.equality_expression()
+ self.state = 718
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Equality_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def relational_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Relational_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Relational_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_equality_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEquality_expression" ):
+ listener.enterEquality_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEquality_expression" ):
+ listener.exitEquality_expression(self)
+
+
+
+
+ def equality_expression(self):
+
+ localctx = CParser.Equality_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 112, self.RULE_equality_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 719
+ self.relational_expression()
+ self.state = 724
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__70 or _la==CParser.T__71:
+ self.state = 720
+ _la = self._input.LA(1)
+ if not(_la==CParser.T__70 or _la==CParser.T__71):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 721
+ self.relational_expression()
+ self.state = 726
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Relational_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def shift_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Shift_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Shift_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_relational_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterRelational_expression" ):
+ listener.enterRelational_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitRelational_expression" ):
+ listener.exitRelational_expression(self)
+
+
+
+
+ def relational_expression(self):
+
+ localctx = CParser.Relational_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 114, self.RULE_relational_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 727
+ self.shift_expression()
+ self.state = 732
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0):
+ self.state = 728
+ _la = self._input.LA(1)
+ if not(((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 729
+ self.shift_expression()
+ self.state = 734
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Shift_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def additive_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Additive_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Additive_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_shift_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterShift_expression" ):
+ listener.enterShift_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitShift_expression" ):
+ listener.exitShift_expression(self)
+
+
+
+
+ def shift_expression(self):
+
+ localctx = CParser.Shift_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 116, self.RULE_shift_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 735
+ self.additive_expression()
+ self.state = 740
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__76 or _la==CParser.T__77:
+ self.state = 736
+ _la = self._input.LA(1)
+ if not(_la==CParser.T__76 or _la==CParser.T__77):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 737
+ self.additive_expression()
+ self.state = 742
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
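+    # Note (hand-written, not ANTLR output): the expression rules above encode
+    # C's binary-operator precedence by nesting -- each rule parses one operator
+    # level and delegates tighter-binding operators to the next rule
+    # (logical-AND -> inclusive-OR -> exclusive-OR -> AND -> equality ->
+    # relational -> shift).  Assuming the usual C operator literals for the
+    # anonymous T__ tokens, an input such as
+    #     a && b | c ^ d & e == f >> g
+    # is grouped as
+    #     a && (b | (c ^ (d & (e == (f >> g)))))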
+ class StatementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def labeled_statement(self):
+ return self.getTypedRuleContext(CParser.Labeled_statementContext,0)
+
+
+ def compound_statement(self):
+ return self.getTypedRuleContext(CParser.Compound_statementContext,0)
+
+
+ def expression_statement(self):
+ return self.getTypedRuleContext(CParser.Expression_statementContext,0)
+
+
+ def selection_statement(self):
+ return self.getTypedRuleContext(CParser.Selection_statementContext,0)
+
+
+ def iteration_statement(self):
+ return self.getTypedRuleContext(CParser.Iteration_statementContext,0)
+
+
+ def jump_statement(self):
+ return self.getTypedRuleContext(CParser.Jump_statementContext,0)
+
+
+ def macro_statement(self):
+ return self.getTypedRuleContext(CParser.Macro_statementContext,0)
+
+
+ def asm2_statement(self):
+ return self.getTypedRuleContext(CParser.Asm2_statementContext,0)
+
+
+ def asm1_statement(self):
+ return self.getTypedRuleContext(CParser.Asm1_statementContext,0)
+
+
+ def asm_statement(self):
+ return self.getTypedRuleContext(CParser.Asm_statementContext,0)
+
+
+ def declaration(self):
+ return self.getTypedRuleContext(CParser.DeclarationContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStatement" ):
+ listener.enterStatement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStatement" ):
+ listener.exitStatement(self)
+
+
+
+
+ def statement(self):
+
+ localctx = CParser.StatementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 118, self.RULE_statement)
+ try:
+ self.state = 754
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,92,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 743
+ self.labeled_statement()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 744
+ self.compound_statement()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 745
+ self.expression_statement()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 746
+ self.selection_statement()
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 747
+ self.iteration_statement()
+ pass
+
+ elif la_ == 6:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 748
+ self.jump_statement()
+ pass
+
+ elif la_ == 7:
+ self.enterOuterAlt(localctx, 7)
+ self.state = 749
+ self.macro_statement()
+ pass
+
+ elif la_ == 8:
+ self.enterOuterAlt(localctx, 8)
+ self.state = 750
+ self.asm2_statement()
+ pass
+
+ elif la_ == 9:
+ self.enterOuterAlt(localctx, 9)
+ self.state = 751
+ self.asm1_statement()
+ pass
+
+ elif la_ == 10:
+ self.enterOuterAlt(localctx, 10)
+ self.state = 752
+ self.asm_statement()
+ pass
+
+ elif la_ == 11:
+ self.enterOuterAlt(localctx, 11)
+ self.state = 753
+ self.declaration()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
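+    # Sketch (hand-written, not ANTLR output): the enterRule/exitRule hooks above
+    # are driven by a ParseTreeWalker.  A minimal listener that counts statement
+    # nodes could look like the following, assuming the matching generated lexer
+    # is named CLexer and the grammar's start rule is translation_unit:
+    #
+    #     from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker, ParseTreeListener
+    #
+    #     class StmtCounter(ParseTreeListener):
+    #         def __init__(self):
+    #             self.count = 0
+    #         def enterStatement(self, ctx):   # name matches the hasattr() dispatch above
+    #             self.count += 1
+    #
+    #     parser = CParser(CommonTokenStream(CLexer(InputStream(source_text))))
+    #     counter = StmtCounter()
+    #     ParseTreeWalker().walk(counter, parser.translation_unit())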
+ class Asm2_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_asm2_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAsm2_statement" ):
+ listener.enterAsm2_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAsm2_statement" ):
+ listener.exitAsm2_statement(self)
+
+
+
+
+ def asm2_statement(self):
+
+ localctx = CParser.Asm2_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 120, self.RULE_asm2_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 757
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__78:
+ self.state = 756
+ self.match(CParser.T__78)
+
+
+ self.state = 759
+ self.match(CParser.IDENTIFIER)
+ self.state = 760
+ self.match(CParser.T__37)
+ self.state = 764
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 761
+ _la = self._input.LA(1)
+ if _la <= 0 or _la==CParser.T__1:
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 766
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
+
+ self.state = 767
+ self.match(CParser.T__38)
+ self.state = 768
+ self.match(CParser.T__1)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
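+    # Note: asm2_statement matches a call-like inline-assembly form -- an
+    # optional keyword token, an IDENTIFIER, a parenthesized run of arbitrary
+    # tokens (anything except ';'), then ';'.  A hypothetical input, assuming
+    # MSVC-style literals for the anonymous tokens:
+    #     _asm MyAsmHelper (mov eax, ebx);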
+ class Asm1_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_asm1_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAsm1_statement" ):
+ listener.enterAsm1_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAsm1_statement" ):
+ listener.exitAsm1_statement(self)
+
+
+
+
+ def asm1_statement(self):
+
+ localctx = CParser.Asm1_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 122, self.RULE_asm1_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 770
+ self.match(CParser.T__79)
+ self.state = 771
+ self.match(CParser.T__0)
+ self.state = 775
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
+ self.state = 772
+ _la = self._input.LA(1)
+ if _la <= 0 or _la==CParser.T__19:
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 777
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 778
+ self.match(CParser.T__19)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
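+    # Note: asm1_statement matches a braced assembly block -- a keyword token,
+    # '{', any tokens except '}', then '}'.  A hypothetical input, assuming the
+    # keyword literal is MSVC's '_asm':
+    #     _asm { mov eax, ebx }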
+ class Asm_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_asm_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAsm_statement" ):
+ listener.enterAsm_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAsm_statement" ):
+ listener.exitAsm_statement(self)
+
+
+
+
+ def asm_statement(self):
+
+ localctx = CParser.Asm_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 124, self.RULE_asm_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 780
+ self.match(CParser.T__80)
+ self.state = 781
+ self.match(CParser.T__0)
+ self.state = 785
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
+ self.state = 782
+ _la = self._input.LA(1)
+ if _la <= 0 or _la==CParser.T__19:
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 787
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 788
+ self.match(CParser.T__19)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Macro_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def statement_list(self):
+ return self.getTypedRuleContext(CParser.Statement_listContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_macro_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterMacro_statement" ):
+ listener.enterMacro_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitMacro_statement" ):
+ listener.exitMacro_statement(self)
+
+
+
+
+ def macro_statement(self):
+
+ localctx = CParser.Macro_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 126, self.RULE_macro_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 790
+ self.match(CParser.IDENTIFIER)
+ self.state = 791
+ self.match(CParser.T__37)
+ self.state = 795
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 792
+ self.declaration()
+ self.state = 797
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
+
+ self.state = 799
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,98,self._ctx)
+ if la_ == 1:
+ self.state = 798
+ self.statement_list()
+
+
+ self.state = 802
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if ((((_la - 38)) & ~0x3f) == 0 and ((1 << (_la - 38)) & ((1 << (CParser.T__37 - 38)) | (1 << (CParser.T__41 - 38)) | (1 << (CParser.T__43 - 38)) | (1 << (CParser.T__44 - 38)) | (1 << (CParser.T__47 - 38)) | (1 << (CParser.T__48 - 38)) | (1 << (CParser.T__49 - 38)) | (1 << (CParser.T__52 - 38)) | (1 << (CParser.T__53 - 38)) | (1 << (CParser.T__54 - 38)) | (1 << (CParser.IDENTIFIER - 38)) | (1 << (CParser.CHARACTER_LITERAL - 38)) | (1 << (CParser.STRING_LITERAL - 38)) | (1 << (CParser.HEX_LITERAL - 38)) | (1 << (CParser.DECIMAL_LITERAL - 38)) | (1 << (CParser.OCTAL_LITERAL - 38)) | (1 << (CParser.FLOATING_POINT_LITERAL - 38)))) != 0):
+ self.state = 801
+ self.expression()
+
+
+ self.state = 804
+ self.match(CParser.T__38)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
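+    # Note: macro_statement matches a macro invocation used in statement
+    # position: IDENTIFIER '(' declaration* statement_list? expression? ')'.
+    # This lets statement-like macros parse without preprocessing, e.g.
+    # (hypothetical):
+    #     MY_LOOP_MACRO (UINTN Index; Index = 0; Index < Count)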
+ class Labeled_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def statement(self):
+ return self.getTypedRuleContext(CParser.StatementContext,0)
+
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_labeled_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLabeled_statement" ):
+ listener.enterLabeled_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLabeled_statement" ):
+ listener.exitLabeled_statement(self)
+
+
+
+
+ def labeled_statement(self):
+
+ localctx = CParser.Labeled_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 128, self.RULE_labeled_statement)
+ try:
+ self.state = 817
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 806
+ self.match(CParser.IDENTIFIER)
+ self.state = 807
+ self.match(CParser.T__22)
+ self.state = 808
+ self.statement()
+ pass
+ elif token in [CParser.T__81]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 809
+ self.match(CParser.T__81)
+ self.state = 810
+ self.constant_expression()
+ self.state = 811
+ self.match(CParser.T__22)
+ self.state = 812
+ self.statement()
+ pass
+ elif token in [CParser.T__82]:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 814
+ self.match(CParser.T__82)
+ self.state = 815
+ self.match(CParser.T__22)
+ self.state = 816
+ self.statement()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
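+    # Note: the three alternatives above are C's labeled statements, assuming
+    # the usual keyword literals for the anonymous tokens:
+    #     Retry:   Status = TryAgain ();     (IDENTIFIER ':' statement)
+    #     case 1:  break;                    ('case' constant_expression ':' statement)
+    #     default: break;                    ('default' ':' statement)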
+ class Compound_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def statement_list(self):
+ return self.getTypedRuleContext(CParser.Statement_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_compound_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterCompound_statement" ):
+ listener.enterCompound_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitCompound_statement" ):
+ listener.exitCompound_statement(self)
+
+
+
+
+ def compound_statement(self):
+
+ localctx = CParser.Compound_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 130, self.RULE_compound_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 819
+ self.match(CParser.T__0)
+ self.state = 823
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 820
+ self.declaration()
+ self.state = 825
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
+
+ self.state = 827
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0) or ((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (CParser.T__78 - 79)) | (1 << (CParser.T__79 - 79)) | (1 << (CParser.T__80 - 79)) | (1 << (CParser.T__81 - 79)) | (1 << (CParser.T__82 - 79)) | (1 << (CParser.T__83 - 79)) | (1 << (CParser.T__85 - 79)) | (1 << (CParser.T__86 - 79)) | (1 << (CParser.T__87 - 79)) | (1 << (CParser.T__88 - 79)) | (1 << (CParser.T__89 - 79)) | (1 << (CParser.T__90 - 79)) | (1 << (CParser.T__91 - 79)) | (1 << (CParser.IDENTIFIER - 79)) | (1 << (CParser.CHARACTER_LITERAL - 79)) | (1 << (CParser.STRING_LITERAL - 79)) | (1 << (CParser.HEX_LITERAL - 79)) | (1 << (CParser.DECIMAL_LITERAL - 79)) | (1 << (CParser.OCTAL_LITERAL - 79)) | (1 << (CParser.FLOATING_POINT_LITERAL - 79)))) != 0):
+ self.state = 826
+ self.statement_list()
+
+
+ self.state = 829
+ self.match(CParser.T__19)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Statement_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def statement(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.StatementContext)
+ else:
+ return self.getTypedRuleContext(CParser.StatementContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_statement_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStatement_list" ):
+ listener.enterStatement_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStatement_list" ):
+ listener.exitStatement_list(self)
+
+
+
+
+ def statement_list(self):
+
+ localctx = CParser.Statement_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 132, self.RULE_statement_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 832
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 831
+ self.statement()
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 834
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,103,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Expression_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_expression_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExpression_statement" ):
+ listener.enterExpression_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExpression_statement" ):
+ listener.exitExpression_statement(self)
+
+
+
+
+ def expression_statement(self):
+
+ localctx = CParser.Expression_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 134, self.RULE_expression_statement)
+ try:
+ self.state = 840
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__1]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 836
+ self.match(CParser.T__1)
+ pass
+ elif token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 837
+ self.expression()
+ self.state = 838
+ self.match(CParser.T__1)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Selection_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.e = None # ExpressionContext
+
+ # @param i=None Type: int
+ def statement(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.StatementContext)
+ else:
+ return self.getTypedRuleContext(CParser.StatementContext,i)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_selection_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterSelection_statement" ):
+ listener.enterSelection_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitSelection_statement" ):
+ listener.exitSelection_statement(self)
+
+
+
+
+ def selection_statement(self):
+
+ localctx = CParser.Selection_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 136, self.RULE_selection_statement)
+ try:
+ self.state = 858
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__83]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 842
+ self.match(CParser.T__83)
+ self.state = 843
+ self.match(CParser.T__37)
+ self.state = 844
+ localctx.e = self.expression()
+ self.state = 845
+ self.match(CParser.T__38)
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+ self.state = 847
+ self.statement()
+ self.state = 850
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,105,self._ctx)
+ if la_ == 1:
+ self.state = 848
+ self.match(CParser.T__84)
+ self.state = 849
+ self.statement()
+
+
+ pass
+ elif token in [CParser.T__85]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 852
+ self.match(CParser.T__85)
+ self.state = 853
+ self.match(CParser.T__37)
+ self.state = 854
+ self.expression()
+ self.state = 855
+ self.match(CParser.T__38)
+ self.state = 856
+ self.statement()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
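+    # Note: on the first ('if') alternative above the parser calls
+    # StorePredicateExpression() with the start/stop line and column of the
+    # condition plus its source text; ECC's predicate-expression checkpoints
+    # later inspect these recorded predicates (for example, comparisons against
+    # boolean constants).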
+ class Iteration_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.e = None # ExpressionContext
+
+ def statement(self):
+ return self.getTypedRuleContext(CParser.StatementContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_iteration_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterIteration_statement" ):
+ listener.enterIteration_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitIteration_statement" ):
+ listener.exitIteration_statement(self)
+
+
+
+
+ def iteration_statement(self):
+
+ localctx = CParser.Iteration_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 138, self.RULE_iteration_statement)
+ try:
+ self.state = 876
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__86]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 860
+ self.match(CParser.T__86)
+ self.state = 861
+ self.match(CParser.T__37)
+ self.state = 862
+ localctx.e = self.expression()
+ self.state = 863
+ self.match(CParser.T__38)
+ self.state = 864
+ self.statement()
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+ pass
+ elif token in [CParser.T__87]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 867
+ self.match(CParser.T__87)
+ self.state = 868
+ self.statement()
+ self.state = 869
+ self.match(CParser.T__86)
+ self.state = 870
+ self.match(CParser.T__37)
+ self.state = 871
+ localctx.e = self.expression()
+ self.state = 872
+ self.match(CParser.T__38)
+ self.state = 873
+ self.match(CParser.T__1)
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Jump_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_jump_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterJump_statement" ):
+ listener.enterJump_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitJump_statement" ):
+ listener.exitJump_statement(self)
+
+
+
+
+ def jump_statement(self):
+
+ localctx = CParser.Jump_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 140, self.RULE_jump_statement)
+ try:
+ self.state = 891
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,108,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 878
+ self.match(CParser.T__88)
+ self.state = 879
+ self.match(CParser.IDENTIFIER)
+ self.state = 880
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 881
+ self.match(CParser.T__89)
+ self.state = 882
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 883
+ self.match(CParser.T__90)
+ self.state = 884
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 885
+ self.match(CParser.T__91)
+ self.state = 886
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 887
+ self.match(CParser.T__91)
+ self.state = 888
+ self.expression()
+ self.state = 889
+ self.match(CParser.T__1)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
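+    # Note: the five alternatives above are C's jump statements, assuming the
+    # usual keyword literals for the anonymous tokens:
+    #     goto Done;    continue;    break;    return;    return Status;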
+
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CParser4/__init__.py
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Check.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Check.py
new file mode 100755
index 00000000..3967fa2f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Check.py
@@ -0,0 +1,1535 @@
+## @file
+# This file is used to define checkpoints used by ECC tool
+#
+# Copyright (c) 2021, Arm Limited. All rights reserved.<BR>
+# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import re
+from CommonDataClass.DataClass import *
+import Common.DataType as DT
+import Common.EdkLogger as EdkLogger
+from Ecc.EccToolError import *
+from Ecc.MetaDataParser import ParseHeaderCommentSection
+from Ecc import EccGlobalData
+from Ecc import c
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## Check
+#
+# This class is to define checkpoints used by ECC tool
+#
+# @param object: Inherited from object class
+#
+class Check(object):
+ def __init__(self):
+ pass
+
+ # Check all required checkpoints
+ def Check(self):
+ self.GeneralCheck()
+ self.MetaDataFileCheck()
+ self.DoxygenCheck()
+ self.IncludeFileCheck()
+ self.PredicateExpressionCheck()
+ self.DeclAndDataTypeCheck()
+ self.FunctionLayoutCheck()
+ self.NamingConventionCheck()
+ self.SmmCommParaCheck()
+
+ def SmmCommParaCheck(self):
+ self.SmmCommParaCheckBufferType()
+
+
+    # Check that SMM communication functions use the correct parameter type:
+    # 1. Find function calls made through an instance./->Communicate() interface
+    #    and make sure the protocol instance is of type EFI_SMM_COMMUNICATION_PROTOCOL.
+    # 2. Trace the origin of the 2nd parameter of the Communicate() interface; if
+    #    a. it is a local buffer on the stack,
+    #       report an error.
+    #    b. it is a global buffer, check that the driver holding the global buffer
+    #       is of type DXE_RUNTIME_DRIVER and report success.
+    #    c. it is a buffer allocated via AllocatePage/AllocatePool (possibly wrapped
+    #       by nested function calls), check that the EFI_MEMORY_TYPE is
+    #       EfiRuntimeServicesCode, EfiRuntimeServicesData, EfiACPIMemoryNVS or
+    #       EfiReservedMemoryType and report success.
+    #    d. it is a buffer located via EFI_SYSTEM_TABLE.ConfigurationTable (possibly
+    #       wrapped by nested function calls), report a warning to request human code review.
+    #    e. it is a buffer reached through any other kind of pointer (which may require
+    #       tracing into nested function calls to locate), repeat the checks in a, b, c and d.
+ def SmmCommParaCheckBufferType(self):
+ if EccGlobalData.gConfig.SmmCommParaCheckBufferType == '1' or EccGlobalData.gConfig.SmmCommParaCheckAll == '1':
+ EdkLogger.quiet("Checking SMM communication parameter type ...")
+ # Get all EFI_SMM_COMMUNICATION_PROTOCOL interface
+ CommApiList = []
+ for IdentifierTable in EccGlobalData.gIdentifierTableList:
+ SqlCommand = """select ID, Name, BelongsToFile from %s
+ where Modifier = 'EFI_SMM_COMMUNICATION_PROTOCOL*' """ % (IdentifierTable)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ if RecordSet:
+ for Record in RecordSet:
+ if Record[1] not in CommApiList:
+ CommApiList.append(Record[1])
+ # For each interface, check the second parameter
+ for CommApi in CommApiList:
+ for IdentifierTable in EccGlobalData.gIdentifierTableList:
+ SqlCommand = """select ID, Name, Value, BelongsToFile, StartLine from %s
+ where Name = '%s->Communicate' and Model = %s""" \
+ % (IdentifierTable, CommApi, MODEL_IDENTIFIER_FUNCTION_CALLING)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ if RecordSet:
+ # print IdentifierTable
+ for Record in RecordSet:
+ # Get the second parameter for Communicate function
+ SecondPara = Record[2].split(',')[1].strip()
+ SecondParaIndex = None
+ if SecondPara.startswith('&'):
+ SecondPara = SecondPara[1:]
+ if SecondPara.endswith(']'):
+ SecondParaIndex = SecondPara[SecondPara.find('[') + 1:-1]
+ SecondPara = SecondPara[:SecondPara.find('[')]
+ # Get the ID
+ Id = Record[0]
+ # Get the BelongsToFile
+ BelongsToFile = Record[3]
+ # Get the source file path
+ SqlCommand = """select FullPath from File where ID = %s""" % BelongsToFile
+ NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ FullPath = NewRecordSet[0][0]
+ # Get the line no of function calling
+ StartLine = Record[4]
+ # Get the module type
+ SqlCommand = """select Value3 from INF where BelongsToFile = (select ID from File
+ where Path = (select Path from File where ID = %s) and Model = 1011)
+ and Value2 = 'MODULE_TYPE'""" % BelongsToFile
+ NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ ModuleType = NewRecordSet[0][0] if NewRecordSet else None
+
+ # print BelongsToFile, FullPath, StartLine, ModuleType, SecondPara
+
+ Value = FindPara(FullPath, SecondPara, StartLine)
+ # Find the value of the parameter
+ if Value:
+ if 'AllocatePage' in Value \
+ or 'AllocatePool' in Value \
+ or 'AllocateRuntimePool' in Value \
+ or 'AllocateZeroPool' in Value:
+ pass
+ else:
+ if '->' in Value:
+ if not EccGlobalData.gException.IsException(
+ ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE, Value):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE,
+                                                                               OtherMsg="Please review whether the buffer type"
+                                                                                        + " is correct. If it is correct," +
+                                                                                        " please add [%s] to the exception list"
+ % Value,
+ BelongsToTable=IdentifierTable,
+ BelongsToItem=Id)
+ else:
+ if not EccGlobalData.gException.IsException(
+ ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE, Value):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE,
+                                                                               OtherMsg="Please review whether the buffer type"
+                                                                                        + " is correct. If it is correct," +
+                                                                                        " please add [%s] to the exception list"
+ % Value,
+ BelongsToTable=IdentifierTable,
+ BelongsToItem=Id)
+
+
+ # Not find the value of the parameter
+ else:
+ SqlCommand = """select ID, Modifier, Name, Value, Model, BelongsToFunction from %s
+ where Name = '%s' and StartLine < %s order by StartLine DESC""" \
+ % (IdentifierTable, SecondPara, StartLine)
+ NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ if NewRecordSet:
+ Value = NewRecordSet[0][1]
+ if 'AllocatePage' in Value \
+ or 'AllocatePool' in Value \
+ or 'AllocateRuntimePool' in Value \
+ or 'AllocateZeroPool' in Value:
+ pass
+ else:
+ if not EccGlobalData.gException.IsException(
+ ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE, Value):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE,
+                                                                           OtherMsg="Please review whether the buffer type"
+                                                                                    + " is correct. If it is correct," +
+                                                                                    " please add [%s] to the exception list"
+ % Value,
+ BelongsToTable=IdentifierTable,
+ BelongsToItem=Id)
+ else:
+ pass
+
+ # Check UNI files
+ def UniCheck(self):
+ if EccGlobalData.gConfig.GeneralCheckUni == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking whether UNI file is UTF-16 ...")
+ SqlCommand = """select ID, FullPath, ExtName from File where ExtName like 'uni'"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ File = Record[1]
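+                # A little-endian UTF-16 file must begin with the byte-order mark FF FE;
+                # compare as bytes because the file is opened in 'rb' mode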
+ FileIn = open(File, 'rb').read(2)
+                if FileIn != b'\xff\xfe':
+ OtherMsg = "File %s is not a valid UTF-16 UNI file" % Record[1]
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_UNI, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
+
+ # General Checking
+ def GeneralCheck(self):
+ self.GeneralCheckNonAcsii()
+ self.UniCheck()
+ self.GeneralCheckNoTab()
+ self.GeneralCheckLineEnding()
+ self.GeneralCheckTrailingWhiteSpaceLine()
+
+    # Check that no TAB characters are used; tabs should be replaced with spaces
+ def GeneralCheckNoTab(self):
+ if EccGlobalData.gConfig.GeneralCheckNoTab == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking No TAB used in file ...")
+ SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
+ op = open(Record[1]).readlines()
+ IndexOfLine = 0
+ for Line in op:
+ IndexOfLine += 1
+ IndexOfChar = 0
+ for Char in Line:
+ IndexOfChar += 1
+ if Char == '\t':
+ OtherMsg = "File %s has TAB char at line %s column %s" % (Record[1], IndexOfLine, IndexOfChar)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NO_TAB, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
+
+    # Check that only CRLF (Carriage Return Line Feed) line endings are used.
+ def GeneralCheckLineEnding(self):
+ if EccGlobalData.gConfig.GeneralCheckLineEnding == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking line ending in file ...")
+ SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
+ op = open(Record[1], 'rb').readlines()
+ IndexOfLine = 0
+ for Line in op:
+ IndexOfLine += 1
+ if not bytes.decode(Line).endswith('\r\n'):
+ OtherMsg = "File %s has invalid line ending at line %s" % (Record[1], IndexOfLine)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_INVALID_LINE_ENDING, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
+
+    # Check that no line ends with trailing white space.
+ def GeneralCheckTrailingWhiteSpaceLine(self):
+ if EccGlobalData.gConfig.GeneralCheckTrailingWhiteSpaceLine == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking trailing white space line in file ...")
+ SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
+ op = open(Record[1], 'r').readlines()
+ IndexOfLine = 0
+ for Line in op:
+ IndexOfLine += 1
+ if Line.replace('\r', '').replace('\n', '').endswith(' '):
+ OtherMsg = "File %s has trailing white spaces at line %s" % (Record[1], IndexOfLine)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_TRAILING_WHITE_SPACE_LINE, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
+
+    # Check whether a file contains non-ASCII characters
+ def GeneralCheckNonAcsii(self):
+ if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking Non-ASCII char in file ...")
+ SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
+ op = open(Record[1]).readlines()
+ IndexOfLine = 0
+ for Line in op:
+ IndexOfLine += 1
+ IndexOfChar = 0
+ for Char in Line:
+ IndexOfChar += 1
+ if ord(Char) > 126:
+ OtherMsg = "File %s has Non-ASCII char at line %s column %s" % (Record[1], IndexOfLine, IndexOfChar)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
+
+ # C Function Layout Checking
+ def FunctionLayoutCheck(self):
+ self.FunctionLayoutCheckReturnType()
+ self.FunctionLayoutCheckModifier()
+ self.FunctionLayoutCheckName()
+ self.FunctionLayoutCheckPrototype()
+ self.FunctionLayoutCheckBody()
+ self.FunctionLayoutCheckLocalVariable()
+ self.FunctionLayoutCheckDeprecated()
+
+    # Check whether any deprecated functions are used
+ def FunctionLayoutCheckDeprecated(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckNoDeprecated == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking that no deprecated functions are used ...")
+
+ DeprecatedFunctionSet = ('UnicodeValueToString',
+ 'AsciiValueToString',
+ 'StrCpy',
+ 'StrnCpy',
+ 'StrCat',
+ 'StrnCat',
+ 'UnicodeStrToAsciiStr',
+ 'AsciiStrCpy',
+ 'AsciiStrnCpy',
+ 'AsciiStrCat',
+ 'AsciiStrnCat',
+ 'AsciiStrToUnicodeStr',
+ 'PcdSet8',
+ 'PcdSet16',
+ 'PcdSet32',
+ 'PcdSet64',
+ 'PcdSetPtr',
+ 'PcdSetBool',
+ 'PcdSetEx8',
+ 'PcdSetEx16',
+ 'PcdSetEx32',
+ 'PcdSetEx64',
+ 'PcdSetExPtr',
+ 'PcdSetExBool',
+ 'LibPcdSet8',
+ 'LibPcdSet16',
+ 'LibPcdSet32',
+ 'LibPcdSet64',
+ 'LibPcdSetPtr',
+ 'LibPcdSetBool',
+ 'LibPcdSetEx8',
+ 'LibPcdSetEx16',
+ 'LibPcdSetEx32',
+ 'LibPcdSetEx64',
+ 'LibPcdSetExPtr',
+ 'LibPcdSetExBool',
+ 'GetVariable',
+ 'GetEfiGlobalVariable',
+ )
+
+ for IdentifierTable in EccGlobalData.gIdentifierTableList:
+ SqlCommand = """select ID, Name, BelongsToFile from %s
+ where Model = %s """ % (IdentifierTable, MODEL_IDENTIFIER_FUNCTION_CALLING)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ for Key in DeprecatedFunctionSet:
+ if Key == Record[1]:
+ if not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_DEPRECATE, Key):
+ OtherMsg = 'The function [%s] is deprecated which should NOT be used' % Key
+ EccGlobalData.gDb.TblReport.Insert(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_DEPRECATE,
+ OtherMsg=OtherMsg,
+ BelongsToTable=IdentifierTable,
+ BelongsToItem=Record[0])
+
+ def WalkTree(self):
+ IgnoredPattern = c.GetIgnoredDirListPattern()
+ for Dirpath, Dirnames, Filenames in os.walk(EccGlobalData.gTarget):
+ for Dir in Dirnames:
+ Dirname = os.path.join(Dirpath, Dir)
+ if os.path.islink(Dirname):
+ Dirname = os.path.realpath(Dirname)
+ if os.path.isdir(Dirname):
+ # symlinks to directories are treated as directories
+ Dirnames.remove(Dir)
+ Dirnames.append(Dirname)
+ if IgnoredPattern.match(Dirpath.upper()):
+ continue
+ for f in Filenames[:]:
+ if f.lower() in EccGlobalData.gConfig.SkipFileList:
+ Filenames.remove(f)
+ yield (Dirpath, Dirnames, Filenames)
+
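+    # WalkTree() is a filtered os.walk(): it resolves directory symlinks, prunes
+    # ignored directories, and drops files named in SkipFileList.  Typical usage,
+    # mirroring the commented-out loops kept below (Process is a hypothetical
+    # callback):
+    #
+    #     for Dirpath, Dirnames, Filenames in self.WalkTree():
+    #         for F in Filenames:
+    #             if os.path.splitext(F)[1] in ('.c', '.h'):
+    #                 Process(os.path.join(Dirpath, F))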
+    # Check whether the return type exists and is on the first line
+ def FunctionLayoutCheckReturnType(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout return type ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c', '.h'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutReturnType(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckFuncLayoutReturnType(FullName)
+
+    # Check whether any optional functional modifiers exist and are next to the return type
+ def FunctionLayoutCheckModifier(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout modifier ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c', '.h'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutModifier(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckFuncLayoutModifier(FullName)
+
+ # Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
+ # Check whether the closing parenthesis is on its own line and also indented two spaces
+ def FunctionLayoutCheckName(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout function name ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c', '.h'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutName(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckFuncLayoutName(FullName)
+
+ # Check whether the function prototypes in include files have the same form as function definitions
+ def FunctionLayoutCheckPrototype(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout function prototype ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[PROTOTYPE]" + FullName)
+# c.CheckFuncLayoutPrototype(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[PROTOTYPE]" + FullName)
+ c.CheckFuncLayoutPrototype(FullName)
+
+    # Check whether the body of a function is contained within open and close braces that must be in the first column
+ def FunctionLayoutCheckBody(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout function body ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutBody(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ c.CheckFuncLayoutBody(FullName)
+
+    # Check whether the data declarations are the first code in a module.
+    # self.CFunctionLayoutCheckDataDeclaration = 1
+    # Check that a variable is not initialized as part of its declaration
+ def FunctionLayoutCheckLocalVariable(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout local variables ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutLocalVariable(FullName)
+
+ for FullName in EccGlobalData.gCFileList:
+ c.CheckFuncLayoutLocalVariable(FullName)
+
+    # Check that STATIC is not used for functions
+ # self.CFunctionLayoutCheckNoStatic = 1
+
+ # Declarations and Data Types Checking
+ def DeclAndDataTypeCheck(self):
+ self.DeclCheckNoUseCType()
+ self.DeclCheckInOutModifier()
+ self.DeclCheckEFIAPIModifier()
+ self.DeclCheckEnumeratedType()
+ self.DeclCheckStructureDeclaration()
+ self.DeclCheckSameStructure()
+ self.DeclCheckUnionType()
+
+
+    # Check that int, unsigned, char, void, and long are not used in any .c, .h or .asl files.
+ def DeclCheckNoUseCType(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking declarations for native C type usage ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckDeclNoUseCType(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckDeclNoUseCType(FullName)
+
+    # Check that the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify function arguments and do not appear in data type declarations
+ def DeclCheckInOutModifier(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration argument modifier ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckDeclArgModifier(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckDeclArgModifier(FullName)
+
+    # Check that the EFIAPI modifier is used at driver entry points, events, and protocol member functions
+ def DeclCheckEFIAPIModifier(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckEFIAPIModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ pass
+
+    # Check whether an enumerated type has a 'typedef' and its name is in capital letters
+ def DeclCheckEnumeratedType(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration enum typedef ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[ENUM]" + FullName)
+# c.CheckDeclEnumTypedef(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ EdkLogger.quiet("[ENUM]" + FullName)
+ c.CheckDeclEnumTypedef(FullName)
+
+    # Check whether a structure type has a 'typedef' and its name is in capital letters
+ def DeclCheckStructureDeclaration(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration struct typedef ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[STRUCT]" + FullName)
+# c.CheckDeclStructTypedef(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ EdkLogger.quiet("[STRUCT]" + FullName)
+ c.CheckDeclStructTypedef(FullName)
+
+    # Check for duplicate structure definitions
+ def DeclCheckSameStructure(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckSameStructure == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking for duplicate structs ...")
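+            # AllStructure maps each structure name to the ID of the file that
+            # first declared it; any later declaration of the same name is
+            # reported as a duplicate.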
+ AllStructure = {}
+ for IdentifierTable in EccGlobalData.gIdentifierTableList:
+ SqlCommand = """select ID, Name, BelongsToFile from %s where Model = %s""" % (IdentifierTable, MODEL_IDENTIFIER_STRUCTURE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[1] != '':
+ if Record[1] not in AllStructure.keys():
+ AllStructure[Record[1]] = Record[2]
+ else:
+ ID = AllStructure[Record[1]]
+ SqlCommand = """select FullPath from File where ID = %s """ % ID
+ NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+                            OtherMsg = "The structure name '%s' is duplicated" % Record[1]
+                            if NewRecordSet != []:
+                                OtherMsg = "The structure name [%s] duplicates the one defined in %s; maybe the struct is NOT typedef'ed, or the new typedef name is NOT used to qualify variables" % (Record[1], NewRecordSet[0][0])
+ if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE, OtherMsg=OtherMsg, BelongsToTable=IdentifierTable, BelongsToItem=Record[0])
+
+    # Check whether a union type has a 'typedef' and its name is in capital letters
+ def DeclCheckUnionType(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration union typedef ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[UNION]" + FullName)
+# c.CheckDeclUnionTypedef(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ EdkLogger.quiet("[UNION]" + FullName)
+ c.CheckDeclUnionTypedef(FullName)
+
+ # Predicate Expression Checking
+ def PredicateExpressionCheck(self):
+ self.PredicateExpressionCheckBooleanValue()
+ self.PredicateExpressionCheckNonBooleanOperator()
+ self.PredicateExpressionCheckComparisonNullType()
+
+    # Check that Boolean values (variables of type BOOLEAN) do not use explicit comparisons to TRUE or FALSE
+ def PredicateExpressionCheckBooleanValue(self):
+ if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking predicate expression Boolean value ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[BOOLEAN]" + FullName)
+# c.CheckBooleanValueComparison(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[BOOLEAN]" + FullName)
+ c.CheckBooleanValueComparison(FullName)
+
+    # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, <, >=, <=).
+ def PredicateExpressionCheckNonBooleanOperator(self):
+ if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+# c.CheckNonBooleanValueComparison(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+ c.CheckNonBooleanValueComparison(FullName)
+
+    # Check that any comparison of a pointer to zero is done via the NULL type
+ def PredicateExpressionCheckComparisonNullType(self):
+ if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking predicate expression NULL pointer ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[POINTER]" + FullName)
+# c.CheckPointerNullComparison(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[POINTER]" + FullName)
+ c.CheckPointerNullComparison(FullName)
+
+ # Include file checking
+ def IncludeFileCheck(self):
+ self.IncludeFileCheckIfndef()
+ self.IncludeFileCheckData()
+ self.IncludeFileCheckSameName()
+
+    # Check for include files with the same name
+ def IncludeFileCheckSameName(self):
+ if EccGlobalData.gConfig.IncludeFileCheckSameName == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking same header file name ...")
+ SqlCommand = """select ID, FullPath from File
+ where Model = 1002 order by Name """
+ RecordDict = {}
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ List = Record[1].replace('/', '\\').split('\\')
+ if len(List) >= 2:
+ Key = List[-2] + '\\' + List[-1]
+ else:
+ Key = List[0]
+ if Key not in RecordDict:
+ RecordDict[Key] = [Record]
+ else:
+ RecordDict[Key].append(Record)
+
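+            # Each key holds the last two path components (parent\name), so two
+            # headers collide only when both the file name and its immediate
+            # parent directory match.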
+ for Key in RecordDict:
+ if len(RecordDict[Key]) > 1:
+ for Item in RecordDict[Key]:
+ Path = mws.relpath(Item[1], EccGlobalData.gWorkspace)
+ if not EccGlobalData.gException.IsException(ERROR_INCLUDE_FILE_CHECK_NAME, Path):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_INCLUDE_FILE_CHECK_NAME, OtherMsg="The file name for [%s] is duplicate" % Path, BelongsToTable='File', BelongsToItem=Item[0])
+
+    # Check whether all include file contents are guarded by an #ifndef statement.
+ def IncludeFileCheckIfndef(self):
+ if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking header file ifndef ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckHeaderFileIfndef(FullName)
+ for FullName in EccGlobalData.gHFileList:
+ MsgList = c.CheckHeaderFileIfndef(FullName)
+
+    # Check that include files do NOT contain code or define data variables
+ def IncludeFileCheckData(self):
+ if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking header file data ...")
+
+ # Get all typedef functions
+ gAllTypedefFun = []
+ for IdentifierTable in EccGlobalData.gIdentifierTableList:
+ SqlCommand = """select Name from %s
+ where Model = %s """ % (IdentifierTable, MODEL_IDENTIFIER_TYPEDEF)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[0].startswith('('):
+ gAllTypedefFun.append(Record[0])
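+            # Typedef names that begin with '(' are function-pointer typedefs;
+            # they are collected and passed to CheckHeaderFileData below,
+            # presumably so it can tell them apart from plain data definitions.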
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckHeaderFileData(FullName)
+ for FullName in EccGlobalData.gHFileList:
+ MsgList = c.CheckHeaderFileData(FullName, gAllTypedefFun)
+
+ # Doxygen document checking
+ def DoxygenCheck(self):
+ self.DoxygenCheckFileHeader()
+ self.DoxygenCheckFunctionHeader()
+ self.DoxygenCheckCommentDescription()
+ self.DoxygenCheckCommentFormat()
+ self.DoxygenCheckCommand()
+
+    # Check whether the file headers follow the Doxygen special documentation blocks in section 2.3.5
+ def DoxygenCheckFileHeader(self):
+ if EccGlobalData.gConfig.DoxygenCheckFileHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen file header ...")
+
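+            # .h/.c files are checked by the C parser; for INF/DEC/DSC/FDF files
+            # the header block is scanned line by line: it must begin with
+            # '## @file' (or '# @file') at the top and end with a '##' line.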
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ Ext = os.path.splitext(F)[1]
+ if Ext in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckFileHeaderDoxygenComments(FullName)
+ elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):
+ FullName = os.path.join(Dirpath, F)
+ op = open(FullName).readlines()
+ FileLinesList = op
+ LineNo = 0
+ CurrentSection = MODEL_UNKNOWN
+ HeaderSectionLines = []
+ HeaderCommentStart = False
+ HeaderCommentEnd = False
+
+                    NextLine = ''    # guard: NextLine may otherwise be unset for files shorter than two lines
+                    for Line in FileLinesList:
+ LineNo = LineNo + 1
+ Line = Line.strip()
+ if (LineNo < len(FileLinesList) - 1):
+ NextLine = FileLinesList[LineNo].strip()
+
+ #
+ # blank line
+ #
+                        if not Line and LineNo == len(FileLinesList):
+ LastSectionFalg = True
+
+ #
+ # check whether file header comment section started
+ #
+ if Line.startswith('#') and \
+ (Line.find('@file') > -1) and \
+ not HeaderCommentStart:
+ if CurrentSection != MODEL_UNKNOWN:
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+                                    Msg = 'INF/DEC/DSC/FDF file header comment should begin with "## @file" or "# @file" at the very top of the file'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+
+ else:
+ CurrentSection = MODEL_IDENTIFIER_FILE_HEADER
+ #
+ # Append the first line to section lines.
+ #
+ HeaderSectionLines.append((Line, LineNo))
+ HeaderCommentStart = True
+ continue
+
+ #
+ # Collect Header content.
+ #
+ if (Line.startswith('#') and CurrentSection == MODEL_IDENTIFIER_FILE_HEADER) and\
+ HeaderCommentStart and not Line.startswith('##') and not\
+ HeaderCommentEnd and NextLine != '':
+ HeaderSectionLines.append((Line, LineNo))
+ continue
+ #
+ # Header content end
+ #
+ if (Line.startswith('##') or not Line.strip().startswith("#")) and HeaderCommentStart \
+ and not HeaderCommentEnd:
+ if Line.startswith('##'):
+ HeaderCommentEnd = True
+ HeaderSectionLines.append((Line, LineNo))
+ ParseHeaderCommentSection(HeaderSectionLines, FullName)
+ break
+                        if not HeaderCommentStart:
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+                                Msg = 'INF/DEC/DSC/FDF file header comment should begin with "## @file" or "# @file" at the very top of the file'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+                        if not HeaderCommentEnd:
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+                                Msg = 'INF/DEC/DSC/FDF file header comment should end with "##" at the end of the file header comment block'
+ # Check whether File header Comment End with '##'
+ if EccGlobalData.gConfig.HeaderCheckFileCommentEnd == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+
+
+
+    # Check whether the function headers follow the Doxygen special documentation blocks in section 2.3.5
+ def DoxygenCheckFunctionHeader(self):
+ if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen function header ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+
+
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.
+ # The brief description must end with a period.
+ def DoxygenCheckCommentDescription(self):
+ if EccGlobalData.gConfig.DoxygenCheckCommentDescription == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ pass
+
+    # Check that comment lines in the '///< ... text ...' format, if used, appear after the code section.
+ def DoxygenCheckCommentFormat(self):
+ if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen comment ///< ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+
+    # Check that the only Doxygen commands allowed to mark the code are @bug and @todo.
+ def DoxygenCheckCommand(self):
+ if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen command ...")
+
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckDoxygenCommand(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ MsgList = c.CheckDoxygenCommand(FullName)
+
+ # Meta-Data File Processing Checking
+ def MetaDataFileCheck(self):
+ self.MetaDataFileCheckPathName()
+ self.MetaDataFileCheckGenerateFileList()
+ self.MetaDataFileCheckLibraryInstance()
+ self.MetaDataFileCheckLibraryInstanceDependent()
+ self.MetaDataFileCheckLibraryInstanceOrder()
+ self.MetaDataFileCheckLibraryNoUse()
+ self.MetaDataFileCheckLibraryDefinedInDec()
+ self.MetaDataFileCheckBinaryInfInFdf()
+ self.MetaDataFileCheckPcdDuplicate()
+ self.MetaDataFileCheckPcdFlash()
+ self.MetaDataFileCheckPcdNoUse()
+ self.MetaDataFileCheckGuidDuplicate()
+ self.MetaDataFileCheckModuleFileNoUse()
+ self.MetaDataFileCheckPcdType()
+ self.MetaDataFileCheckModuleFileGuidDuplication()
+ self.MetaDataFileCheckModuleFileGuidFormat()
+ self.MetaDataFileCheckModuleFileProtocolFormat()
+ self.MetaDataFileCheckModuleFilePpiFormat()
+ self.MetaDataFileCheckModuleFilePcdFormat()
+
+ # Check whether each file defined in meta-data exists
+ def MetaDataFileCheckPathName(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPathName == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # This item is covered when parsing Inf/Dec/Dsc files
+ pass
+
+ # Generate a list for all files defined in meta-data files
+ def MetaDataFileCheckGenerateFileList(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckGenerateFileList == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # This item is covered when parsing Inf/Dec/Dsc files
+ pass
+
+ # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+ # Each Library Instance must specify the Supported Module Types in its Inf file,
+ # and any module specifying the library instance must be one of the supported types.
+ def MetaDataFileCheckLibraryInstance(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstance == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for library instance type issue ...")
+ SqlCommand = """select A.ID, A.Value3, B.Value3 from Inf as A left join Inf as B
+ where A.Value2 = 'LIBRARY_CLASS' and A.Model = %s
+ and B.Value2 = 'MODULE_TYPE' and B.Model = %s and A.BelongsToFile = B.BelongsToFile
+ group by A.BelongsToFile""" % (MODEL_META_DATA_HEADER, MODEL_META_DATA_HEADER)
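+            # The self-join pairs each INF's LIBRARY_CLASS define with the
+            # MODULE_TYPE define from the same file, one row per INF.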
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ LibraryClasses = {}
+ for Record in RecordSet:
+ List = Record[1].split('|', 1)
+ SupModType = []
+ if len(List) == 1:
+ SupModType = DT.SUP_MODULE_LIST_STRING.split(DT.TAB_VALUE_SPLIT)
+ elif len(List) == 2:
+ SupModType = List[1].split()
+
+ if List[0] not in LibraryClasses:
+ LibraryClasses[List[0]] = SupModType
+ else:
+ for Item in SupModType:
+ if Item not in LibraryClasses[List[0]]:
+ LibraryClasses[List[0]].append(Item)
+
+ if Record[2] != DT.SUP_MODULE_BASE and Record[2] not in SupModType:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2, OtherMsg="The Library Class '%s' does not specify its supported module types" % (List[0]), BelongsToTable='Inf', BelongsToItem=Record[0])
+
+ SqlCommand = """select A.ID, A.Value1, B.Value3 from Inf as A left join Inf as B
+ where A.Model = %s and B.Value2 = '%s' and B.Model = %s
+ and B.BelongsToFile = A.BelongsToFile""" \
+ % (MODEL_EFI_LIBRARY_CLASS, 'MODULE_TYPE', MODEL_META_DATA_HEADER)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ # Merge all LibraryClasses' supmodlist
+ RecordDict = {}
+ for Record in RecordSet:
+ if Record[1] not in RecordDict:
+ RecordDict[Record[1]] = [str(Record[2])]
+ else:
+ if Record[2] not in RecordDict[Record[1]]:
+ RecordDict[Record[1]].append(Record[2])
+
+ for Record in RecordSet:
+ if Record[1] in LibraryClasses:
+ if Record[2] not in LibraryClasses[Record[1]] and DT.SUP_MODULE_BASE not in RecordDict[Record[1]]:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, OtherMsg="The type of Library Class [%s] defined in Inf file does not match the type of the module" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])
+ else:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, OtherMsg="The type of Library Class [%s] defined in Inf file does not match the type of the module" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])
+
+ # Check whether a Library Instance has been defined for all dependent library classes
+ def MetaDataFileCheckLibraryInstanceDependent(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstanceDependent == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking for library instance dependency issues ...")
+ SqlCommand = """select ID, Value1, Value2 from Dsc where Model = %s""" % MODEL_EFI_LIBRARY_CLASS
+ LibraryClasses = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
+ for LibraryClass in LibraryClasses:
+ if LibraryClass[1].upper() == 'NULL' or LibraryClass[1].startswith('!ifdef') or LibraryClass[1].startswith('!ifndef') or LibraryClass[1].endswith('!endif'):
+ continue
+ else:
+ LibraryIns = os.path.normpath(mws.join(EccGlobalData.gWorkspace, LibraryClass[2]))
+ SkipDirString = '|'.join(EccGlobalData.gConfig.SkipDirList)
+ p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
+ if p.match(os.path.split(LibraryIns)[0].upper()):
+ continue
+ SqlCommand = """select Value3 from Inf where BelongsToFile =
+ (select ID from File where lower(FullPath) = lower('%s'))
+ and Value2 = '%s'""" % (LibraryIns, DT.PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ IsFound = False
+ for Record in RecordSet:
+ LibName = Record[0].split('|', 1)[0]
+ if LibraryClass[1] == LibName:
+ IsFound = True
+ if not IsFound:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT, LibraryClass[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT, OtherMsg="The Library Class [%s] is not specified in '%s'" % (LibraryClass[1], LibraryClass[2]), BelongsToTable='Dsc', BelongsToItem=LibraryClass[0])
+
+ # Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
+ def MetaDataFileCheckLibraryInstanceOrder(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstanceOrder == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # This checkpoint is not necessary for Ecc check
+ pass
+
+    # Check for unnecessary inclusion of library classes in the INF file
+    # Check for unnecessary duplication of library class names in the DSC file
+ def MetaDataFileCheckLibraryNoUse(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking for unused library instances ...")
+ SqlCommand = """select ID, Value1 from Inf as A where A.Model = %s and A.Value1 not in (select B.Value1 from Dsc as B where Model = %s)""" % (MODEL_EFI_LIBRARY_CLASS, MODEL_EFI_LIBRARY_CLASS)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, OtherMsg="The Library Class [%s] is not used in any platform" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])
+ SqlCommand = """
+ select A.ID, A.Value1, A.BelongsToFile, A.StartLine, B.StartLine from Dsc as A left join Dsc as B
+ where A.Model = %s and B.Model = %s and A.Scope1 = B.Scope1 and A.Scope2 = B.Scope2 and A.ID != B.ID
+ and A.Value1 = B.Value1 and A.Value2 != B.Value2 and A.BelongsToItem = -1 and B.BelongsToItem = -1 and A.StartLine != B.StartLine and B.BelongsToFile = A.BelongsToFile""" \
+ % (MODEL_EFI_LIBRARY_CLASS, MODEL_EFI_LIBRARY_CLASS)
+ RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[3] and Record[4] and Record[3] != Record[4] and Record[1] != 'NULL':
+ SqlCommand = """select FullPath from File where ID = %s""" % (Record[2])
+ FilePathList = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for FilePath in FilePathList:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, OtherMsg="The Library Class [%s] is duplicated in '%s' line %s and line %s." % (Record[1], FilePath, Record[3], Record[4]), BelongsToTable='Dsc', BelongsToItem=Record[0])
+
+    # Check whether the header files in the Include\Library directory are defined in the package DEC file.
+ def MetaDataFileCheckLibraryDefinedInDec(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryDefinedInDec == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking whether library instances are defined in the package DEC file ...")
+ SqlCommand = """
+ select A.Value1, A.StartLine, A.ID, B.Value1 from Inf as A left join Dec as B
+ on A.Model = B.Model and A.Value1 = B.Value1 where A.Model=%s
+ """ % MODEL_EFI_LIBRARY_CLASS
+ RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
+ for Record in RecordSet:
+ LibraryInInf, Line, ID, LibraryDec = Record
+ if not LibraryDec:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, LibraryInInf):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, \
+                                                       OtherMsg="The Library Class [%s] at line %s is not defined in the associated package file." % (LibraryInInf, Line),
+ BelongsToTable='Inf', BelongsToItem=ID)
+
+    # Check that an INF file specified in the FDF file but not in the DSC file is for a binary module only
+ def MetaDataFileCheckBinaryInfInFdf(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckBinaryInfInFdf == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for non-binary modules defined in FDF files ...")
+ SqlCommand = """select A.ID, A.Value1 from Fdf as A
+ where A.Model = %s
+ and A.Enabled > -1
+ and A.Value1 not in
+ (select B.Value1 from Dsc as B
+ where B.Model = %s
+ and B.Enabled > -1)""" % (MODEL_META_DATA_COMPONENT, MODEL_META_DATA_COMPONENT)
+ RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)
+ for Record in RecordSet:
+ FdfID = Record[0]
+ FilePath = Record[1]
+ FilePath = os.path.normpath(mws.join(EccGlobalData.gWorkspace, FilePath))
+ SqlCommand = """select ID from Inf where Model = %s and BelongsToFile = (select ID from File where FullPath like '%s')
+ """ % (MODEL_EFI_SOURCE_FILE, FilePath)
+ NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ if NewRecordSet != []:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF, FilePath):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF, OtherMsg="File [%s] defined in FDF file and not in DSC file must be a binary module" % (FilePath), BelongsToTable='Fdf', BelongsToItem=FdfID)
+
+ # Check whether a PCD is set in a Dsc file or the FDF file, but not in both.
+ def MetaDataFileCheckPcdDuplicate(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPcdDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for duplicate PCDs defined in both DSC and FDF files ...")
+ SqlCommand = """
+ select A.ID, A.Value1, A.Value2, A.BelongsToFile, B.ID, B.Value1, B.Value2, B.BelongsToFile from Dsc as A, Fdf as B
+ where A.Model >= %s and A.Model < %s
+ and B.Model >= %s and B.Model < %s
+ and A.Value1 = B.Value1
+ and A.Value2 = B.Value2
+ and A.Enabled > -1
+ and B.Enabled > -1
+ group by A.ID
+ """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
+ RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
+ for Record in RecordSet:
+ SqlCommand1 = """select Name from File where ID = %s""" % Record[3]
+ SqlCommand2 = """select Name from File where ID = %s""" % Record[7]
+ DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]
+ FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]
+ if DscFileName != FdfFileName:
+ continue
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1] + '.' + Record[2]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[1] + '.' + Record[2]), BelongsToTable='Dsc', BelongsToItem=Record[0])
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[5] + '.' + Record[6]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[5] + '.' + Record[6]), BelongsToTable='Fdf', BelongsToItem=Record[4])
+
+ EdkLogger.quiet("Checking for duplicate PCDs defined in DEC files ...")
+ SqlCommand = """
+ select A.ID, A.Value1, A.Value2, A.Model, B.Model from Dec as A left join Dec as B
+ where A.Model >= %s and A.Model < %s
+ and B.Model >= %s and B.Model < %s
+ and A.Value1 = B.Value1
+ and A.Value2 = B.Value2
+ and A.Scope1 = B.Scope1
+ and A.ID != B.ID
+ and A.Model = B.Model
+ and A.Enabled > -1
+ and B.Enabled > -1
+ and A.BelongsToFile = B.BelongsToFile
+ group by A.ID
+ """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
+ RecordSet = EccGlobalData.gDb.TblDec.Exec(SqlCommand)
+ for Record in RecordSet:
+ RecordCat = Record[1] + '.' + Record[2]
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, RecordCat):
+                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is duplicated in the DEC file" % RecordCat, BelongsToTable='Dec', BelongsToItem=Record[0])
+
+    # Check that PCD settings in the FDF file are related only to flash.
+ def MetaDataFileCheckPcdFlash(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPcdFlash == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking that only Flash-related PCDs are used in FDF ...")
+ SqlCommand = """
+ select ID, Value1, Value2, BelongsToFile from Fdf as A
+ where A.Model >= %s and Model < %s
+ and A.Enabled > -1
+ and A.Value2 not like '%%Flash%%'
+ """ % (MODEL_PCD, MODEL_META_DATA_HEADER)
+ RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, Record[1] + '.' + Record[2]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, OtherMsg="The PCD [%s] defined in FDF file is not related to Flash" % (Record[1] + '.' + Record[2]), BelongsToTable='Fdf', BelongsToItem=Record[0])
+
+    # Check for PCDs used in INF files but not specified in DSC or FDF files
+ def MetaDataFileCheckPcdNoUse(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPcdNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for non-specified PCDs ...")
+ SqlCommand = """
+ select ID, Value1, Value2, BelongsToFile from Inf as A
+ where A.Model >= %s and Model < %s
+ and A.Enabled > -1
+ and (A.Value1, A.Value2) not in
+ (select Value1, Value2 from Dsc as B
+ where B.Model >= %s and B.Model < %s
+ and B.Enabled > -1)
+ and (A.Value1, A.Value2) not in
+ (select Value1, Value2 from Fdf as C
+ where C.Model >= %s and C.Model < %s
+ and C.Enabled > -1)
+ """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, Record[1] + '.' + Record[2]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, OtherMsg="The PCD [%s] defined in INF file is not specified in either DSC or FDF files" % (Record[1] + '.' + Record[2]), BelongsToTable='Inf', BelongsToItem=Record[0])
+
+    # Check for duplicate GUIDs defined for Guid/Protocol/Ppi
+ def MetaDataFileCheckGuidDuplicate(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckGuidDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for duplicate GUID/PPI/PROTOCOL ...")
+ # Check Guid
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID, EccGlobalData.gDb.TblDec)
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID, EccGlobalData.gDb.TblDsc)
+ self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID)
+ # Check protocol
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL, EccGlobalData.gDb.TblDec)
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL, EccGlobalData.gDb.TblDsc)
+ self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL)
+ # Check ppi
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI, EccGlobalData.gDb.TblDec)
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI, EccGlobalData.gDb.TblDsc)
+ self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI)
+
+    # Check whether all files under the module directory are described in INF files
+ def MetaDataFileCheckModuleFileNoUse(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFileNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking for unused module files ...")
+ SqlCommand = """
+ select upper(Path) from File where ID in (select BelongsToFile from Inf where BelongsToFile != -1)
+ """
+ InfPathSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ InfPathList = []
+ for Item in InfPathSet:
+ if Item[0] not in InfPathList:
+ InfPathList.append(Item[0])
+ SqlCommand = """
+ select ID, Path, FullPath from File where upper(FullPath) not in
+ (select upper(A.Path) || '%s' || upper(B.Value1) from File as A, INF as B
+ where A.ID in (select BelongsToFile from INF where Model = %s group by BelongsToFile) and
+ B.BelongsToFile = A.ID and B.Model = %s)
+ and (Model = %s or Model = %s)
+ """ % (os.sep, MODEL_EFI_SOURCE_FILE, MODEL_EFI_SOURCE_FILE, MODEL_FILE_C, MODEL_FILE_H)
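+            # The inner select rebuilds each expected source path as
+            # <INF directory> + os.sep + <Sources entry>; any .c/.h file under
+            # the module directory not produced this way is a candidate.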
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Record in RecordSet:
+ Path = Record[1]
+ Path = Path.upper().replace('\X64', '').replace('\IA32', '').replace('\EBC', '').replace('\IPF', '').replace('\ARM', '')
+ if Path in InfPathList:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, Record[2]):
+                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, OtherMsg="The source file [%s] exists in the module directory but is not described in the INF file." % (Record[2]), BelongsToTable='File', BelongsToItem=Record[0])
+
+    # Check whether the PCD is correctly used in C functions according to its type
+ def MetaDataFileCheckPcdType(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPcdType == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking for PCD type in C code function usage ...")
+ SqlCommand = """
+ select ID, Model, Value1, Value2, BelongsToFile from INF where Model > %s and Model < %s
+ """ % (MODEL_PCD, MODEL_META_DATA_HEADER)
+ PcdSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Pcd in PcdSet:
+ Model = Pcd[1]
+ PcdName = Pcd[2]
+ if Pcd[3]:
+ PcdName = Pcd[3]
+ BelongsToFile = Pcd[4]
+ SqlCommand = """
+ select ID from File where FullPath in
+ (select B.Path || '%s' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s
+ and B.ID = %s and (B.Model = %s or B.Model = %s))
+ """ % (os.sep, MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile, MODEL_FILE_C, MODEL_FILE_H)
+ TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Tbl in TableSet:
+ TblName = 'Identifier' + str(Tbl[0])
+ SqlCommand = """
+ select Name, ID from %s where value like '%s' and Model = %s
+ """ % (TblName, PcdName, MODEL_IDENTIFIER_FUNCTION_CALLING)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ TblNumber = TblName.replace('Identifier', '')
+ for Record in RecordSet:
+ FunName = Record[0]
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, FunName):
+                            if Model in [MODEL_PCD_FIXED_AT_BUILD] and not FunName.startswith('FixedPcdGet'):
+                                EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg="The PCD '%s' is defined as a FixedPcd but is accessed by C function [%s]" % (PcdName, FunName), BelongsToTable=TblName, BelongsToItem=Record[1])
+                            if Model in [MODEL_PCD_FEATURE_FLAG] and (not FunName.startswith('FeaturePcdGet') and not FunName.startswith('FeaturePcdSet')):
+                                EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg="The PCD '%s' is defined as a FeaturePcd but is accessed by C function [%s]" % (PcdName, FunName), BelongsToTable=TblName, BelongsToItem=Record[1])
+                            if Model in [MODEL_PCD_PATCHABLE_IN_MODULE] and (not FunName.startswith('PatchablePcdGet') and not FunName.startswith('PatchablePcdSet')):
+                                EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg="The PCD '%s' is defined as a PatchablePcd but is accessed by C function [%s]" % (PcdName, FunName), BelongsToTable=TblName, BelongsToItem=Record[1])
+
+ #ERROR_META_DATA_FILE_CHECK_PCD_TYPE
+ pass
+
+ # Internal worker function to get the INF workspace relative path from FileID
+ def GetInfFilePathFromID(self, FileID):
+ Table = EccGlobalData.gDb.TblFile
+ SqlCommand = """select A.FullPath from %s as A where A.ID = %s""" % (Table.Table, FileID)
+ RecordSet = Table.Exec(SqlCommand)
+ Path = ""
+ for Record in RecordSet:
+ Path = mws.relpath(Record[0], EccGlobalData.gWorkspace)
+ return Path
+
+    # Check whether two module INFs under one workspace have the same FILE_GUID value
+ def MetaDataFileCheckModuleFileGuidDuplication(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidDuplication == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking for duplicate FILE_GUID values in module INF files ...")
+ Table = EccGlobalData.gDb.TblInf
+ SqlCommand = """
+ select A.ID, A.Value3, A.BelongsToFile, B.BelongsToFile from %s as A, %s as B
+ where A.Value2 = 'FILE_GUID' and B.Value2 = 'FILE_GUID' and
+ A.Value3 = B.Value3 and A.ID != B.ID group by A.ID
+ """ % (Table.Table, Table.Table)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ InfPath1 = self.GetInfFilePathFromID(Record[2])
+ InfPath2 = self.GetInfFilePathFromID(Record[3])
+ if InfPath1 and InfPath2:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, InfPath1):
+ Msg = "The FILE_GUID of INF file [%s] is duplicated with that of %s" % (InfPath1, InfPath2)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+
+
+ # Check Guid Format in module INF
+ def MetaDataFileCheckModuleFileGuidFormat(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Check Guid Format in module INF ...")
+ Table = EccGlobalData.gDb.TblInf
+ SqlCommand = """
+ select ID, Value1, Usage, BelongsToFile from %s where Model = %s group by ID
+ """ % (Table.Table, MODEL_EFI_GUID)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ Value1 = Record[1]
+ Value2 = Record[2]
+ GuidCommentList = []
+ InfPath = self.GetInfFilePathFromID(Record[3])
+                Msg = "The GUID format of %s in INF file [%s] does not follow the rules" % (Value1, InfPath)
+ if Value2.startswith(DT.TAB_SPECIAL_COMMENT):
+ GuidCommentList = Value2[2:].split(DT.TAB_SPECIAL_COMMENT)
+ if GuidCommentList[0].strip().startswith(DT.TAB_INF_USAGE_UNDEFINED):
+ continue
+ elif len(GuidCommentList) > 1:
+ if not GuidCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
+ DT.TAB_INF_USAGE_SOME_PRO,
+ DT.TAB_INF_USAGE_CON,
+ DT.TAB_INF_USAGE_SOME_CON)):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ if not (GuidCommentList[1].strip()).startswith(DT.TAB_INF_GUIDTYPE_VAR) and \
+ not GuidCommentList[1].strip().startswith((DT.TAB_INF_GUIDTYPE_EVENT,
+ DT.TAB_INF_GUIDTYPE_HII,
+ DT.TAB_INF_GUIDTYPE_FILE,
+ DT.TAB_INF_GUIDTYPE_HOB,
+ DT.TAB_INF_GUIDTYPE_FV,
+ DT.TAB_INF_GUIDTYPE_ST,
+ DT.TAB_INF_GUIDTYPE_TSG,
+ DT.TAB_INF_GUIDTYPE_GUID,
+ DT.TAB_INF_GUIDTYPE_PROTOCOL,
+ DT.TAB_INF_GUIDTYPE_PPI,
+ DT.TAB_INF_USAGE_UNDEFINED)):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ else:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ else:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
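+
+    # A conforming Guids entry carries a '##' usage comment whose first part
+    # is a usage (e.g. PRODUCES, CONSUMES) and whose second part is a GUID
+    # type, e.g. (illustrative):  gSomeGuid  ## CONSUMES ## GUID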
+
+ # Check Protocol Format in module INF
+ def MetaDataFileCheckModuleFileProtocolFormat(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFileProtocolFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Check Protocol Format in module INF ...")
+ Table = EccGlobalData.gDb.TblInf
+ SqlCommand = """
+ select ID, Value1, Usage, BelongsToFile from %s where Model = %s group by ID
+ """ % (Table.Table, MODEL_EFI_PROTOCOL)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ Value1 = Record[1]
+ Value2 = Record[2]
+ GuidCommentList = []
+ InfPath = self.GetInfFilePathFromID(Record[3])
+                Msg = "The Protocol format of %s in INF file [%s] does not follow the rules" % (Value1, InfPath)
+ if Value2.startswith(DT.TAB_SPECIAL_COMMENT):
+ GuidCommentList = Value2[2:].split(DT.TAB_SPECIAL_COMMENT)
+ if len(GuidCommentList) >= 1:
+ if not GuidCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
+ DT.TAB_INF_USAGE_SOME_PRO,
+ DT.TAB_INF_USAGE_CON,
+ DT.TAB_INF_USAGE_SOME_CON,
+ DT.TAB_INF_USAGE_NOTIFY,
+ DT.TAB_INF_USAGE_TO_START,
+ DT.TAB_INF_USAGE_BY_START,
+ DT.TAB_INF_USAGE_UNDEFINED)):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ else:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+
+
+ # Check Ppi Format in module INF
+ def MetaDataFileCheckModuleFilePpiFormat(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFilePpiFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Check Ppi Format in module INF ...")
+ Table = EccGlobalData.gDb.TblInf
+ SqlCommand = """
+ select ID, Value1, Usage, BelongsToFile from %s where Model = %s group by ID
+ """ % (Table.Table, MODEL_EFI_PPI)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ Value1 = Record[1]
+ Value2 = Record[2]
+ GuidCommentList = []
+ InfPath = self.GetInfFilePathFromID(Record[3])
+                Msg = "The Ppi format of %s in INF file [%s] does not follow the rules" % (Value1, InfPath)
+ if Value2.startswith(DT.TAB_SPECIAL_COMMENT):
+ GuidCommentList = Value2[2:].split(DT.TAB_SPECIAL_COMMENT)
+ if len(GuidCommentList) >= 1:
+ if not GuidCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
+ DT.TAB_INF_USAGE_SOME_PRO,
+ DT.TAB_INF_USAGE_CON,
+ DT.TAB_INF_USAGE_SOME_CON,
+ DT.TAB_INF_USAGE_NOTIFY,
+ DT.TAB_INF_USAGE_UNDEFINED)):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PPI, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ else:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PPI, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+
+ # Check Pcd Format in module INF
+ def MetaDataFileCheckModuleFilePcdFormat(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFilePcdFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Check Pcd Format in module INF ...")
+ Table = EccGlobalData.gDb.TblInf
+ SqlCommand = """
+ select ID, Model, Value1, Value2, Usage, BelongsToFile from %s where Model >= %s and Model < %s group by ID
+ """ % (Table.Table, MODEL_PCD, MODEL_META_DATA_HEADER)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ Model = Record[1]
+ PcdName = Record[2] + '.' + Record[3]
+ Usage = Record[4]
+ PcdCommentList = []
+ InfPath = self.GetInfFilePathFromID(Record[5])
+                Msg = "The Pcd format of %s in INF file [%s] does not follow the rules" % (PcdName, InfPath)
+ if Usage.startswith(DT.TAB_SPECIAL_COMMENT):
+ PcdCommentList = Usage[2:].split(DT.TAB_SPECIAL_COMMENT)
+ if len(PcdCommentList) >= 1:
+ if Model in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_FEATURE_FLAG] \
+ and not PcdCommentList[0].strip().startswith((DT.TAB_INF_USAGE_SOME_PRO,
+ DT.TAB_INF_USAGE_CON,
+ DT.TAB_INF_USAGE_UNDEFINED)):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PCD, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ if Model in [MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX] \
+ and not PcdCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
+ DT.TAB_INF_USAGE_SOME_PRO,
+ DT.TAB_INF_USAGE_CON,
+ DT.TAB_INF_USAGE_SOME_CON,
+ DT.TAB_INF_USAGE_UNDEFINED)):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PCD, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+ else:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PCD, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
+
+    # Check whether there is a duplicate Guid/Ppi/Protocol name
+ def CheckGuidProtocolPpi(self, ErrorID, Model, Table):
+ Name = ''
+ if Model == MODEL_EFI_GUID:
+ Name = 'guid'
+ if Model == MODEL_EFI_PROTOCOL:
+ Name = 'protocol'
+ if Model == MODEL_EFI_PPI:
+ Name = 'ppi'
+ SqlCommand = """
+ select A.ID, A.Value1 from %s as A, %s as B
+ where A.Model = %s and B.Model = %s
+ and A.Value1 like B.Value1 and A.ID != B.ID
+ and A.Scope1 = B.Scope1
+ and A.Enabled > -1
+ and B.Enabled > -1
+ group by A.ID
+ """ % (Table.Table, Table.Table, Model, Model)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ErrorID, Record[1]):
+                EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg="The %s name [%s] is defined more than once" % (Name.upper(), Record[1]), BelongsToTable=Table.Table, BelongsToItem=Record[0])
+
+    # Check whether there is a duplicate Guid/Ppi/Protocol value
+ def CheckGuidProtocolPpiValue(self, ErrorID, Model):
+ Name = ''
+ Table = EccGlobalData.gDb.TblDec
+ if Model == MODEL_EFI_GUID:
+ Name = 'guid'
+ if Model == MODEL_EFI_PROTOCOL:
+ Name = 'protocol'
+ if Model == MODEL_EFI_PPI:
+ Name = 'ppi'
+ SqlCommand = """
+ select A.ID, A.Value1, A.Value2 from %s as A, %s as B
+ where A.Model = %s and B.Model = %s
+ and A.Value2 like B.Value2 and A.ID != B.ID
+ and A.Scope1 = B.Scope1 and A.Value1 != B.Value1
+ group by A.ID
+ """ % (Table.Table, Table.Table, Model, Model)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ErrorID, Record[2]):
+                EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg="The %s value [%s] is used more than once" % (Name.upper(), Record[2]), BelongsToTable=Table.Table, BelongsToItem=Record[0])
+
+ # Naming Convention Check
+ def NamingConventionCheck(self):
+ if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' \
+ or EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' \
+ or EccGlobalData.gConfig.NamingConventionCheckIfndefStatement == '1' \
+ or EccGlobalData.gConfig.NamingConventionCheckVariableName == '1' \
+ or EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable == '1' \
+ or EccGlobalData.gConfig.NamingConventionCheckAll == '1'\
+ or EccGlobalData.gConfig.CheckAll == '1':
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ Id = c.GetTableID(FullName)
+ if Id < 0:
+ continue
+ FileTable = 'Identifier' + str(Id)
+ self.NamingConventionCheckDefineStatement(FileTable)
+ self.NamingConventionCheckTypedefStatement(FileTable)
+ self.NamingConventionCheckVariableName(FileTable)
+ self.NamingConventionCheckSingleCharacterVariable(FileTable)
+ if os.path.splitext(F)[1] in ('.h'):
+ self.NamingConventionCheckIfndefStatement(FileTable)
+
+ self.NamingConventionCheckPathName()
+ self.NamingConventionCheckFunctionName()
+
+ # Check whether only capital letters are used for #define declarations
+ def NamingConventionCheckDefineStatement(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming convention of #define statement ...")
+
+ SqlCommand = """select ID, Value from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_MACRO_DEFINE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Name = Record[1].strip().split()[1]
+ if Name.find('(') != -1:
+ Name = Name[0:Name.find('(')]
+ if Name.upper() != Name:
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT, Name):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT, OtherMsg="The #define name [%s] does not follow the rules" % (Name), BelongsToTable=FileTable, BelongsToItem=Record[0])
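+
+    # Example (illustrative): '#define MAX_COUNT 10' passes, while
+    # '#define MaxCount 10' is reported because Name.upper() != Name.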
+
+ # Check whether only capital letters are used for typedef declarations
+ def NamingConventionCheckTypedefStatement(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+            EdkLogger.quiet("Checking naming convention of typedef statements ...")
+
+ SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_TYPEDEF)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Name = Record[1].strip()
+ if Name != '' and Name is not None:
+ if Name[0] == '(':
+ Name = Name[1:Name.find(')')]
+ if Name.find('(') > -1:
+ Name = Name[Name.find('(') + 1 : Name.find(')')]
+ Name = Name.replace('WINAPI', '')
+ Name = Name.replace('*', '').strip()
+ if Name.upper() != Name:
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT, Name):
+                        EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT, OtherMsg="The typedef name [%s] does not follow the rules" % (Name), BelongsToTable=FileTable, BelongsToItem=Record[0])
+
+    # Check whether the #ifndef guard at the start of an include file has no prefix underscore and ends with exactly one postfix underscore character, '_'.
+ def NamingConventionCheckIfndefStatement(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckIfndefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming convention of #ifndef statement ...")
+
+ SqlCommand = """select ID, Value from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_MACRO_IFNDEF)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ if RecordSet:
+ # Only check the first ifndef statement of the file
+ FirstDefine = sorted(RecordSet, key=lambda Record: Record[0])[0]
+ Name = FirstDefine[1].replace('#ifndef', '').strip()
+ if Name[0] == '_' or Name[-1] != '_' or Name[-2] == '_':
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT, Name):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT, OtherMsg="The #ifndef name [%s] does not follow the rules" % (Name), BelongsToTable=FileTable, BelongsToItem=FirstDefine[0])
+
+    # Rules for path names, variable names and function names:
+    # 1. First character must be upper case
+    # 2. Must contain at least one lower-case letter
+    # 3. Must not contain spaces
+    # Check whether the path name follows the rules
+ def NamingConventionCheckPathName(self):
+ if EccGlobalData.gConfig.NamingConventionCheckPathName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming convention of file path name ...")
+ Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+ SqlCommand = """select ID, Name from File"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not Pattern.match(Record[1]):
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_PATH_NAME, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_PATH_NAME, OtherMsg="The file path [%s] does not follow the rules" % (Record[1]), BelongsToTable='File', BelongsToItem=Record[0])
+
+    # Rules for path names, variable names and function names:
+    # 1. First character must be upper case
+    # 2. Must contain at least one lower-case letter
+    # 3. Must not contain spaces
+    # 4. Global variable names must start with 'g'
+    # Check whether the variable name follows the rules
+ def NamingConventionCheckVariableName(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckVariableName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming convention of variable name ...")
+ Pattern = re.compile(r'^[A-Zgm]+\S*[a-z]\S*$')
+
+ SqlCommand = """select ID, Name, Modifier from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_VARIABLE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Var = Record[1]
+ Modifier = Record[2]
+ if Var.startswith('CONST'):
+ Var = Var[5:].lstrip()
+ if not Pattern.match(Var) and not (Modifier.endswith('*') and Var.startswith('p')):
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, OtherMsg="The variable name [%s] does not follow the rules" % (Record[1]), BelongsToTable=FileTable, BelongsToItem=Record[0])
+
+    # Rules for path names, variable names and function names:
+    # 1. First character must be upper case
+    # 2. Must contain at least one lower-case letter
+    # 3. Must not contain spaces
+    # Check whether the function name follows the rules
+ def NamingConventionCheckFunctionName(self):
+ if EccGlobalData.gConfig.NamingConventionCheckFunctionName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming convention of function name ...")
+ Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+ SqlCommand = """select ID, Name from Function"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not Pattern.match(Record[1]):
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME, OtherMsg="The function name [%s] does not follow the rules" % (Record[1]), BelongsToTable='Function', BelongsToItem=Record[0])
+
+    # Check that single-character variable names are not used
+ def NamingConventionCheckSingleCharacterVariable(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming convention of single character variable name ...")
+
+ SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_VARIABLE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Variable = Record[1].replace('*', '')
+ if len(Variable) == 1:
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE, OtherMsg="The variable name [%s] does not follow the rules" % (Record[1]), BelongsToTable=FileTable, BelongsToItem=Record[0])
+
+def FindPara(FilePath, Para, CallingLine):
+    with open(FilePath) as File:
+        Lines = File.readlines()
+    for Index in range(CallingLine - 1, 0, -1):
+        # Search upward for the nearest assignment statement for Para
+        Line = Lines[Index].strip()
+        if Line.startswith('%s = ' % Para):
+            return Line
+
+    return ''
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    Checker = Check()
+    Checker.Check()
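+    # Editor's illustrative sketch (not part of the upstream tool): the
+    # #define check above reduces to an all-upper-case predicate, and the
+    # path/function-name checks to the mixed-case regex shown here. The
+    # sample names are hypothetical demo data.
+    NamePattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+    for SampleName in ('MAX_BUFFER_SIZE', 'MaxBufferSize', '_Reserved'):
+        DefineVerdict = 'passes' if SampleName.upper() == SampleName else 'fails'
+        MixedVerdict = 'passes' if NamePattern.match(SampleName) else 'fails'
+        print('%s: %s the #define rule, %s the mixed-case rule' % (SampleName, DefineVerdict, MixedVerdict))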
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragment.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragment.py
new file mode 100755
index 00000000..fc1f8941
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragment.py
@@ -0,0 +1,159 @@
+## @file
+# fragments of source file
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+
+## The description of comment contents and start & end position
+#
+#
+class Comment :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ # @param CommentType The type of comment (T_COMMENT_TWO_SLASH or T_COMMENT_SLASH_STAR).
+ #
+ def __init__(self, Str, Begin, End, CommentType):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+ self.Type = CommentType
+
+## The description of preprocess directives and start & end position
+#
+#
+class PP_Directive :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of predicate expression and start & end position
+#
+#
+class PredicateExpression :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of function definition and start & end position
+#
+#
+class FunctionDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+    # @param ModifierStr The modifier string of the function
+    # @param DeclStr The declarator string of the function
+    # @param Begin The start position tuple.
+    # @param End The end position tuple.
+    # @param LBPos The left brace position tuple.
+    # @param NamePos The function name position tuple.
+ #
+ def __init__(self, ModifierStr, DeclStr, Begin, End, LBPos, NamePos):
+ self.Modifier = ModifierStr
+ self.Declarator = DeclStr
+ self.StartPos = Begin
+ self.EndPos = End
+ self.LeftBracePos = LBPos
+ self.NamePos = NamePos
+
+## The description of variable declaration and start & end position
+#
+#
+class VariableDeclaration :
+ ## The constructor
+ #
+ # @param self The object pointer
+    # @param ModifierStr The modifier string of the declaration
+    # @param DeclStr The declarator string of the declaration
+ # @param Begin The start position tuple.
+ # @param NamePos The name position tuple.
+ #
+ def __init__(self, ModifierStr, DeclStr, Begin, NamePos):
+ self.Modifier = ModifierStr
+ self.Declarator = DeclStr
+ self.StartPos = Begin
+ self.NameStartPos = NamePos
+
+## The description of enum definition and start & end position
+#
+#
+class EnumerationDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of struct/union definition and start & end position
+#
+#
+class StructUnionDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of 'Typedef' definition and start & end position
+#
+#
+class TypedefDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+    # @param FromStr The source type of the typedef
+    # @param ToStr The new type name introduced by the typedef
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, FromStr, ToStr, Begin, End):
+ self.FromType = FromStr
+ self.ToType = ToStr
+ self.StartPos = Begin
+ self.EndPos = End
+
+class FunctionCalling:
+ ## The constructor
+ #
+ # @param self The object pointer
+    # @param Name The name of the called function
+    # @param Param The parameter list string of the call
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Name, Param, Begin, End):
+ self.FuncName = Name
+ self.ParamList = Param
+ self.StartPos = Begin
+ self.EndPos = End
+
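+# Editor's illustrative sketch (not part of the upstream tool): how callers
+# typically build these fragment records. The positions are made-up demo
+# values, and 0 stands for the two-slash comment type used by the collector.
+if __name__ == '__main__':
+    DemoComment = Comment('// sample', (1, 0), (1, 9), 0)
+    DemoCall = FunctionCalling('Foo', 'A, B', (3, 0), (3, 9))
+    print(DemoComment.Content + ' at ' + str(DemoComment.StartPos))
+    print(DemoCall.FuncName + '(' + DemoCall.ParamList + ')')
+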
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
new file mode 100755
index 00000000..d0e38219
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
@@ -0,0 +1,595 @@
+## @file
+# preprocess source file
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+
+from __future__ import print_function
+from __future__ import absolute_import
+import re
+import Common.LongFilePathOs as os
+import sys
+if sys.version_info.major == 3:
+ import antlr4 as antlr
+ from Ecc.CParser4.CLexer import CLexer
+ from Ecc.CParser4.CParser import CParser
+else:
+ import antlr3 as antlr
+ antlr.InputStream = antlr.StringStream
+ from Ecc.CParser3.CLexer import CLexer
+ from Ecc.CParser3.CParser import CParser
+
+
+from Ecc import FileProfile
+from Ecc.CodeFragment import Comment
+from Ecc.CodeFragment import PP_Directive
+from Ecc.ParserWarning import Warning
+
+
+##define T_CHAR_SPACE ' '
+##define T_CHAR_NULL '\0'
+##define T_CHAR_CR '\r'
+##define T_CHAR_TAB '\t'
+##define T_CHAR_LF '\n'
+##define T_CHAR_SLASH '/'
+##define T_CHAR_BACKSLASH '\\'
+##define T_CHAR_DOUBLE_QUOTE '\"'
+##define T_CHAR_SINGLE_QUOTE '\''
+##define T_CHAR_STAR '*'
+##define T_CHAR_HASH '#'
+
+(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
+T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
+(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')
+
+SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
+
+(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)
+
+(T_PP_INCLUDE, T_PP_DEFINE, T_PP_OTHERS) = (0, 1, 2)
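+
+# Editor's illustrative sketch (not part of the upstream tool): classifying a
+# directive string into the T_PP_* categories defined above. 'DirectiveText'
+# is a hypothetical parameter name.
+def _DemoClassifyPPDirective(DirectiveText):
+    Stripped = DirectiveText.lstrip(T_CHAR_HASH).lstrip()
+    if Stripped.startswith('include'):
+        return T_PP_INCLUDE
+    if Stripped.startswith('define'):
+        return T_PP_DEFINE
+    return T_PP_OTHERS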
+
+## The collector for source code fragments.
+#
+# PreprocessFile method should be called prior to ParseFile
+#
+# GetNext*** procedures fetch the next token first and then make a judgement on it.
+# Get*** procedures make a judgement on the current token only.
+#
+class CodeFragmentCollector:
+ ## The constructor
+ #
+ # @param self The object pointer
+    # @param FileName The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.Profile = FileProfile.FileProfile(FileName)
+ self.Profile.FileLinesList.append(T_CHAR_LF)
+ self.FileName = FileName
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+ self.TokenReleaceList = []
+ self.__Token = ""
+ self.__SkippedChars = ""
+
+ ## __EndOfFile() method
+ #
+    # Judge whether the current buffer position is at the end of the file
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at file end
+ # @retval False Current File buffer position is NOT at file end
+ #
+ def __EndOfFile(self):
+ NumberOfLines = len(self.Profile.FileLinesList)
+ SizeOfLastLine = NumberOfLines
+ if NumberOfLines > 0:
+ SizeOfLastLine = len(self.Profile.FileLinesList[-1])
+
+ if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
+ return True
+ elif self.CurrentLineNumber > NumberOfLines:
+ return True
+ else:
+ return False
+
+ ## __EndOfLine() method
+ #
+    # Judge whether the current buffer position is at the end of the line
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at line end
+ # @retval False Current File buffer position is NOT at line end
+ #
+ def __EndOfLine(self):
+ SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
+ return True
+ else:
+ return False
+
+ ## Rewind() method
+ #
+ # Reset file data buffer to the initial state
+ #
+ # @param self The object pointer
+ #
+ def Rewind(self):
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ ## __UndoOneChar() method
+ #
+ # Go back one char in the file buffer
+ #
+ # @param self The object pointer
+ # @retval True Successfully go back one char
+ # @retval False Not able to go back one char as file beginning reached
+ #
+ def __UndoOneChar(self):
+
+ if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
+ return False
+ elif self.CurrentOffsetWithinLine == 0:
+ self.CurrentLineNumber -= 1
+ self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
+ else:
+ self.CurrentOffsetWithinLine -= 1
+ return True
+
+ ## __GetOneChar() method
+ #
+ # Move forward one char in the file buffer
+ #
+ # @param self The object pointer
+ #
+ def __GetOneChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ else:
+ self.CurrentOffsetWithinLine += 1
+
+ ## __CurrentChar() method
+ #
+ # Get the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Current char
+ #
+ def __CurrentChar(self):
+ CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
+# if CurrentChar > 255:
+# raise Warning("Non-Ascii char found At Line %d, offset %d" % (self.CurrentLineNumber, self.CurrentOffsetWithinLine), self.FileName, self.CurrentLineNumber)
+ return CurrentChar
+
+ ## __NextChar() method
+ #
+    # Get the char one position past the one pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Next char
+ #
+ def __NextChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ return self.Profile.FileLinesList[self.CurrentLineNumber][0]
+ else:
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
+
+ ## __SetCurrentCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def __SetCurrentCharValue(self, Value):
+ self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
+
+    ## __SetCharValue() method
+    #
+    # Modify the value of the char at the given line and offset
+    #
+    # @param self The object pointer
+    # @param Line The line number (1-based) of the char
+    # @param Offset The offset of the char within that line
+    # @param Value The new value for the char
+    #
+ def __SetCharValue(self, Line, Offset, Value):
+ self.Profile.FileLinesList[Line - 1][Offset] = Value
+
+ ## __CurrentLine() method
+ #
+ # Get the list that contains current line contents
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
+ def __CurrentLine(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+
+    ## __InsertComma() method
+    #
+    # Insert ',' into the given line to stand in for a preprocess directive,
+    # unless the surrounding context already supplies a separator
+    #
+    # @param self The object pointer
+    # @param Line The line number (1-based) to examine
+    #
+    def __InsertComma(self, Line):
+
+        if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
+            BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
+            if BeforeHashPart.rstrip().endswith(',') or BeforeHashPart.rstrip().endswith(';'):
+ return
+
+ if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
+ return
+
+ if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
+ return
+
+ if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
+ return
+
+ self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
+
+ ## PreprocessFile() method
+ #
+ # Preprocess file contents, replace comments with spaces.
+ # In the end, rewind the file buffer pointer to the beginning
+ # BUGBUG: No !include statement processing contained in this procedure
+ # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
+ #
+ # @param self The object pointer
+ #
+ def PreprocessFile(self):
+
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ PPExtend = False
+ CommentObj = None
+ PPDirectiveObj = None
+ # HashComment in quoted string " " is ignored.
+ InString = False
+ InCharLiteral = False
+
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
+ while not self.__EndOfFile():
+
+ if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
+ InString = not InString
+
+ if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
+ InCharLiteral = not InCharLiteral
+            # on meeting a new line, // and '#' comments end
+ if self.__CurrentChar() == T_CHAR_LF:
+ if HashComment and PPDirectiveObj is not None:
+ if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPExtend = True
+ else:
+ PPExtend = False
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ CommentObj.Content += T_CHAR_LF
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ if InComment and HashComment and not PPExtend:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+
+ if InString or InCharLiteral:
+ CurrentLine = "".join(self.__CurrentLine())
+ if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
+ self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
+
+ if InComment and not DoubleSlashComment and not HashComment:
+ CommentObj.Content += T_CHAR_LF
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ CommentObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ self.__GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ if HashComment:
+ # // follows hash PP directive
+ if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+ continue
+ else:
+ PPDirectiveObj.Content += self.__CurrentChar()
+ if PPExtend:
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ else:
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ # check for // comment
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = True
+ DoubleSlashComment = True
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_TWO_SLASH)
+ # check for '#' comment
+ elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
+ InComment = True
+ HashComment = True
+ PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
+ # check for /* comment start
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_SLASH_STAR)
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = True
+ else:
+ self.__GetOneChar()
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ if InComment and HashComment and not PPExtend:
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+
+ self.Rewind()
+
+ def PreprocessFileWithClear(self):
+
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ PPExtend = False
+ CommentObj = None
+ PPDirectiveObj = None
+ # HashComment in quoted string " " is ignored.
+ InString = False
+ InCharLiteral = False
+
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
+ while not self.__EndOfFile():
+
+ if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
+ InString = not InString
+
+ if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
+ InCharLiteral = not InCharLiteral
+            # on meeting a new line, // and '#' comments end
+ if self.__CurrentChar() == T_CHAR_LF:
+ if HashComment and PPDirectiveObj is not None:
+ if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPExtend = True
+ else:
+ PPExtend = False
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ CommentObj.Content += T_CHAR_LF
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ if InComment and HashComment and not PPExtend:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+
+ if InString or InCharLiteral:
+ CurrentLine = "".join(self.__CurrentLine())
+ if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
+ self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
+
+ if InComment and not DoubleSlashComment and not HashComment:
+ CommentObj.Content += T_CHAR_LF
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ CommentObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ self.__GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ if HashComment:
+ # // follows hash PP directive
+ if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+ continue
+ else:
+ PPDirectiveObj.Content += self.__CurrentChar()
+# if PPExtend:
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ else:
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ # check for // comment
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = True
+ DoubleSlashComment = True
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_TWO_SLASH)
+ # check for '#' comment
+ elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
+ InComment = True
+ HashComment = True
+ PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
+ # check for /* comment start
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_SLASH_STAR)
+ CommentObj.Content += self.__CurrentChar()
+                self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+                self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = True
+ else:
+ self.__GetOneChar()
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ if InComment and HashComment and not PPExtend:
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ self.Rewind()
+
+ ## ParseFile() method
+ #
+ # Parse the file profile buffer to extract fd, fv ... information
+ # Exception will be raised if syntax error found
+ #
+ # @param self The object pointer
+ #
+ def ParseFile(self):
+ self.PreprocessFile()
+ # restore from ListOfList to ListOfString
+        self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
+ FileStringContents = ''
+ for fileLine in self.Profile.FileLinesList:
+ FileStringContents += fileLine
+ for Token in self.TokenReleaceList:
+ if Token in FileStringContents:
+ FileStringContents = FileStringContents.replace(Token, 'TOKENSTRING')
+ cStream = antlr.InputStream(FileStringContents)
+ lexer = CLexer(cStream)
+ tStream = antlr.CommonTokenStream(lexer)
+ parser = CParser(tStream)
+ parser.translation_unit()
+
+ def ParseFileWithClearedPPDirective(self):
+ self.PreprocessFileWithClear()
+ # restore from ListOfList to ListOfString
+        self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
+ FileStringContents = ''
+ for fileLine in self.Profile.FileLinesList:
+ FileStringContents += fileLine
+ cStream = antlr.InputStream(FileStringContents)
+ lexer = CLexer(cStream)
+ tStream = antlr.CommonTokenStream(lexer)
+ parser = CParser(tStream)
+ parser.translation_unit()
+
+ def CleanFileProfileBuffer(self):
+ FileProfile.CommentList = []
+ FileProfile.PPDirectiveList = []
+ FileProfile.PredicateExpressionList = []
+ FileProfile.FunctionDefinitionList = []
+ FileProfile.VariableDeclarationList = []
+ FileProfile.EnumerationDefinitionList = []
+ FileProfile.StructUnionDefinitionList = []
+ FileProfile.TypedefDefinitionList = []
+ FileProfile.FunctionCallingList = []
+
+ def PrintFragments(self):
+
+        print('################# ' + self.FileName + ' #####################')
+
+ print('/****************************************/')
+ print('/*************** COMMENTS ***************/')
+ print('/****************************************/')
+ for comment in FileProfile.CommentList:
+ print(str(comment.StartPos) + comment.Content)
+
+ print('/****************************************/')
+ print('/********* PREPROCESS DIRECTIVES ********/')
+ print('/****************************************/')
+ for pp in FileProfile.PPDirectiveList:
+ print(str(pp.StartPos) + pp.Content)
+
+ print('/****************************************/')
+ print('/********* VARIABLE DECLARATIONS ********/')
+ print('/****************************************/')
+ for var in FileProfile.VariableDeclarationList:
+ print(str(var.StartPos) + var.Modifier + ' '+ var.Declarator)
+
+ print('/****************************************/')
+ print('/********* FUNCTION DEFINITIONS *********/')
+ print('/****************************************/')
+ for func in FileProfile.FunctionDefinitionList:
+ print(str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos))
+
+ print('/****************************************/')
+ print('/************ ENUMERATIONS **************/')
+ print('/****************************************/')
+ for enum in FileProfile.EnumerationDefinitionList:
+ print(str(enum.StartPos) + enum.Content)
+
+ print('/****************************************/')
+ print('/*********** STRUCTS/UNIONS *************/')
+ print('/****************************************/')
+ for su in FileProfile.StructUnionDefinitionList:
+ print(str(su.StartPos) + su.Content)
+
+ print('/****************************************/')
+ print('/********* PREDICATE EXPRESSIONS ********/')
+ print('/****************************************/')
+ for predexp in FileProfile.PredicateExpressionList:
+ print(str(predexp.StartPos) + predexp.Content)
+
+ print('/****************************************/')
+ print('/************** TYPEDEFS ****************/')
+ print('/****************************************/')
+ for typedef in FileProfile.TypedefDefinitionList:
+ print(str(typedef.StartPos) + typedef.ToType)
+
+if __name__ == "__main__":
+
+ collector = CodeFragmentCollector(sys.argv[1])
+ collector.PreprocessFile()
+ print("For Test.")
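+    # Editor's illustrative follow-up (not upstream behavior): PrintFragments
+    # dumps the comments and preprocess directives collected above.
+    collector.PrintFragments()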
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Configuration.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Configuration.py
new file mode 100755
index 00000000..1801c728
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Configuration.py
@@ -0,0 +1,444 @@
+## @file
+# This file is used to define class Configuration
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os
+import Common.EdkLogger as EdkLogger
+from Common.DataType import *
+from Common.StringUtils import *
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+_ConfigFileToInternalTranslation = {
+ # not same
+ "ModifierList":"ModifierSet",
+
+ # same
+ # please keep this in correct alphabetical order.
+ "AutoCorrect":"AutoCorrect",
+ "BinaryExtList":"BinaryExtList",
+ "CFunctionLayoutCheckAll":"CFunctionLayoutCheckAll",
+ "CFunctionLayoutCheckDataDeclaration":"CFunctionLayoutCheckDataDeclaration",
+ "CFunctionLayoutCheckFunctionBody":"CFunctionLayoutCheckFunctionBody",
+ "CFunctionLayoutCheckFunctionName":"CFunctionLayoutCheckFunctionName",
+ "CFunctionLayoutCheckFunctionPrototype":"CFunctionLayoutCheckFunctionPrototype",
+ "CFunctionLayoutCheckNoInitOfVariable":"CFunctionLayoutCheckNoInitOfVariable",
+ "CFunctionLayoutCheckNoStatic":"CFunctionLayoutCheckNoStatic",
+ "CFunctionLayoutCheckOptionalFunctionalModifier":"CFunctionLayoutCheckOptionalFunctionalModifier",
+ "CFunctionLayoutCheckReturnType":"CFunctionLayoutCheckReturnType",
+ "CheckAll":"CheckAll",
+ "Copyright":"Copyright",
+ "DeclarationDataTypeCheckAll":"DeclarationDataTypeCheckAll",
+ "DeclarationDataTypeCheckEFIAPIModifier":"DeclarationDataTypeCheckEFIAPIModifier",
+ "DeclarationDataTypeCheckEnumeratedType":"DeclarationDataTypeCheckEnumeratedType",
+ "DeclarationDataTypeCheckInOutModifier":"DeclarationDataTypeCheckInOutModifier",
+ "DeclarationDataTypeCheckNoUseCType":"DeclarationDataTypeCheckNoUseCType",
+ "DeclarationDataTypeCheckSameStructure":"DeclarationDataTypeCheckSameStructure",
+ "DeclarationDataTypeCheckStructureDeclaration":"DeclarationDataTypeCheckStructureDeclaration",
+ "DeclarationDataTypeCheckUnionType":"DeclarationDataTypeCheckUnionType",
+ "DoxygenCheckAll":"DoxygenCheckAll",
+ "DoxygenCheckCommand":"DoxygenCheckCommand",
+ "DoxygenCheckCommentDescription":"DoxygenCheckCommentDescription",
+ "DoxygenCheckCommentFormat":"DoxygenCheckCommentFormat",
+ "DoxygenCheckFileHeader":"DoxygenCheckFileHeader",
+ "DoxygenCheckFunctionHeader":"DoxygenCheckFunctionHeader",
+ "GeneralCheckAll":"GeneralCheckAll",
+ "GeneralCheckCarriageReturn":"GeneralCheckCarriageReturn",
+ "GeneralCheckFileExistence":"GeneralCheckFileExistence",
+ "GeneralCheckIndentation":"GeneralCheckIndentation",
+ "GeneralCheckIndentationWidth":"GeneralCheckIndentationWidth",
+ "GeneralCheckLine":"GeneralCheckLine",
+ "GeneralCheckLineEnding":"GeneralCheckLineEnding",
+ "GeneralCheckLineWidth":"GeneralCheckLineWidth",
+ "GeneralCheckNoProgma":"GeneralCheckNoProgma",
+ "GeneralCheckNoTab":"GeneralCheckNoTab",
+ "GeneralCheckNo_Asm":"GeneralCheckNo_Asm",
+ "GeneralCheckNonAcsii":"GeneralCheckNonAcsii",
+ "GeneralCheckTabWidth":"GeneralCheckTabWidth",
+ "GeneralCheckTrailingWhiteSpaceLine":"GeneralCheckTrailingWhiteSpaceLine",
+ "GeneralCheckUni":"GeneralCheckUni",
+ "HeaderCheckAll":"HeaderCheckAll",
+ "HeaderCheckCFileCommentLicenseFormat":"HeaderCheckCFileCommentLicenseFormat",
+ "HeaderCheckCFileCommentReferenceFormat":"HeaderCheckCFileCommentReferenceFormat",
+ "HeaderCheckCFileCommentStartSpacesNum":"HeaderCheckCFileCommentStartSpacesNum",
+ "HeaderCheckFile":"HeaderCheckFile",
+ "HeaderCheckFileCommentEnd":"HeaderCheckFileCommentEnd",
+ "HeaderCheckFunction":"HeaderCheckFunction",
+ "IncludeFileCheckAll":"IncludeFileCheckAll",
+ "IncludeFileCheckData":"IncludeFileCheckData",
+ "IncludeFileCheckIfndefStatement":"IncludeFileCheckIfndefStatement",
+ "IncludeFileCheckSameName":"IncludeFileCheckSameName",
+ "MetaDataFileCheckAll":"MetaDataFileCheckAll",
+ "MetaDataFileCheckBinaryInfInFdf":"MetaDataFileCheckBinaryInfInFdf",
+ "MetaDataFileCheckGenerateFileList":"MetaDataFileCheckGenerateFileList",
+ "MetaDataFileCheckGuidDuplicate":"MetaDataFileCheckGuidDuplicate",
+ "MetaDataFileCheckLibraryDefinedInDec":"MetaDataFileCheckLibraryDefinedInDec",
+ "MetaDataFileCheckLibraryInstance":"MetaDataFileCheckLibraryInstance",
+ "MetaDataFileCheckLibraryInstanceDependent":"MetaDataFileCheckLibraryInstanceDependent",
+ "MetaDataFileCheckLibraryInstanceOrder":"MetaDataFileCheckLibraryInstanceOrder",
+ "MetaDataFileCheckLibraryNoUse":"MetaDataFileCheckLibraryNoUse",
+ "MetaDataFileCheckModuleFileGuidDuplication":"MetaDataFileCheckModuleFileGuidDuplication",
+ "MetaDataFileCheckModuleFileGuidFormat":"MetaDataFileCheckModuleFileGuidFormat",
+ "MetaDataFileCheckModuleFileNoUse":"MetaDataFileCheckModuleFileNoUse",
+ "MetaDataFileCheckModuleFilePcdFormat":"MetaDataFileCheckModuleFilePcdFormat",
+ "MetaDataFileCheckModuleFilePpiFormat":"MetaDataFileCheckModuleFilePpiFormat",
+ "MetaDataFileCheckModuleFileProtocolFormat":"MetaDataFileCheckModuleFileProtocolFormat",
+ "MetaDataFileCheckPathName":"MetaDataFileCheckPathName",
+ "MetaDataFileCheckPathOfGenerateFileList":"MetaDataFileCheckPathOfGenerateFileList",
+ "MetaDataFileCheckPcdDuplicate":"MetaDataFileCheckPcdDuplicate",
+ "MetaDataFileCheckPcdFlash":"MetaDataFileCheckPcdFlash",
+ "MetaDataFileCheckPcdNoUse":"MetaDataFileCheckPcdNoUse",
+ "MetaDataFileCheckPcdType":"MetaDataFileCheckPcdType",
+ "NamingConventionCheckAll":"NamingConventionCheckAll",
+ "NamingConventionCheckDefineStatement":"NamingConventionCheckDefineStatement",
+ "NamingConventionCheckFunctionName":"NamingConventionCheckFunctionName",
+ "NamingConventionCheckIfndefStatement":"NamingConventionCheckIfndefStatement",
+ "NamingConventionCheckPathName":"NamingConventionCheckPathName",
+ "NamingConventionCheckSingleCharacterVariable":"NamingConventionCheckSingleCharacterVariable",
+ "NamingConventionCheckTypedefStatement":"NamingConventionCheckTypedefStatement",
+ "NamingConventionCheckVariableName":"NamingConventionCheckVariableName",
+ "PredicateExpressionCheckAll":"PredicateExpressionCheckAll",
+ "PredicateExpressionCheckBooleanValue":"PredicateExpressionCheckBooleanValue",
+ "PredicateExpressionCheckComparisonNullType":"PredicateExpressionCheckComparisonNullType",
+ "PredicateExpressionCheckNonBooleanOperator":"PredicateExpressionCheckNonBooleanOperator",
+ "ScanOnlyDirList":"ScanOnlyDirList",
+ "SkipDirList":"SkipDirList",
+ "SkipFileList":"SkipFileList",
+ "SmmCommParaCheckAll":"SmmCommParaCheckAll",
+ "SmmCommParaCheckBufferType":"SmmCommParaCheckBufferType",
+ "SpaceCheckAll":"SpaceCheckAll",
+ "SpellingCheckAll":"SpellingCheckAll",
+ "TokenReleaceList":"TokenReleaceList",
+ "UniCheckAll":"UniCheckAll",
+ "UniCheckHelpInfo":"UniCheckHelpInfo",
+ "UniCheckPCDInfo":"UniCheckPCDInfo",
+ "Version":"Version"
+ }
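+
+# Editor's illustrative sketch (not part of the upstream tool): a config line
+# such as 'ModifierList = static,const' is translated through the table above
+# before being stored on the Configuration object defined below.
+if __name__ == '__main__':
+    print(_ConfigFileToInternalTranslation["ModifierList"])  # prints 'ModifierSet'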
+
+## Configuration
+#
+# This class is used to define all items in configuration file
+#
+# @param Filename: The name of configuration file, the default is config.ini
+#
+class Configuration(object):
+ def __init__(self, Filename):
+ self.Filename = Filename
+
+ self.Version = 0.1
+
+        ## Identify whether to check all items
+        # 1 - Check all items and ignore all other detailed items
+        # 0 - Do not check all items; the tool will go through the other detailed items to decide what to check
+ #
+ self.CheckAll = 0
+
+        ## Identify whether to automatically correct mistakes
+        # 1 - Automatically correct
+        # 0 - Do not automatically correct
+        # Only the check points listed below can be automatically corrected; others are not supported even if this is set to 1
+ #
+ # GeneralCheckTab
+ # GeneralCheckIndentation
+ # GeneralCheckLine
+ # GeneralCheckCarriageReturn
+ # SpaceCheckAll
+ #
+ self.AutoCorrect = 0
+
+        # List customized modifiers here, split with ','
+        # By default, use the definition in class DataType
+ self.ModifierSet = MODIFIER_SET
+
+ ## General Checking
+ self.GeneralCheckAll = 0
+
+        # Check that no tab is used; tabs should be replaced with spaces
+ self.GeneralCheckNoTab = 1
+ # The width of Tab
+ self.GeneralCheckTabWidth = 2
+        # Check whether the indentation follows the coding style
+ self.GeneralCheckIndentation = 1
+ # The width of indentation
+ self.GeneralCheckIndentationWidth = 2
+        # Check whether any line exceeds the defined width
+ self.GeneralCheckLine = 1
+ # The width of a line
+ self.GeneralCheckLineWidth = 120
+        # Check that _asm is not used in the source file
+        self.GeneralCheckNo_Asm = 1
+        # Check that "#pragma" is not used in the source file except "#pragma pack(#)".
+ self.GeneralCheckNoProgma = 1
+ # Check whether there is a carriage return at the end of the file
+ self.GeneralCheckCarriageReturn = 1
+ # Check whether the file exists
+ self.GeneralCheckFileExistence = 1
+        # Check whether the file has non-ASCII chars
+ self.GeneralCheckNonAcsii = 1
+ # Check whether UNI file is valid
+ self.GeneralCheckUni = 1
+        # Check that only CRLF (Carriage Return Line Feed) line endings are used.
+ self.GeneralCheckLineEnding = 1
+        # Check that there is no trailing white space on any line.
+ self.GeneralCheckTrailingWhiteSpaceLine = 1
+
+ self.CFunctionLayoutCheckNoDeprecated = 1
+
+ ## Space Checking
+ self.SpaceCheckAll = 1
+
+ ## Predicate Expression Checking
+ self.PredicateExpressionCheckAll = 0
+
+        # Check that variables of type BOOLEAN do not use explicit comparisons to TRUE or FALSE
+        self.PredicateExpressionCheckBooleanValue = 1
+        # Check whether non-Boolean comparisons use a compare operator (==, !=, >, <, >=, <=).
+        self.PredicateExpressionCheckNonBooleanOperator = 1
+        # Check that any comparison of a pointer to zero is done via NULL
+        self.PredicateExpressionCheckComparisonNullType = 1
+
+ ## Headers Checking
+ self.HeaderCheckAll = 0
+
+ # Check whether File header exists
+ self.HeaderCheckFile = 1
+ # Check whether Function header exists
+ self.HeaderCheckFunction = 1
+        # Check whether the meta data file header comment ends with '##'
+        self.HeaderCheckFileCommentEnd = 1
+        # Check whether the C file header comment content starts with two spaces
+        self.HeaderCheckCFileCommentStartSpacesNum = 1
+        # Check that each reference in the C file header comment list begins with a bullet character '-'
+        self.HeaderCheckCFileCommentReferenceFormat = 1
+        # Check whether the C file header comment has the license immediately after the "Copyright" line
+        self.HeaderCheckCFileCommentLicenseFormat = 1
+
+ ## C Function Layout Checking
+ self.CFunctionLayoutCheckAll = 0
+
+ # Check whether return type exists and in the first line
+ self.CFunctionLayoutCheckReturnType = 1
+ # Check whether any optional functional modifiers exist and next to the return type
+ self.CFunctionLayoutCheckOptionalFunctionalModifier = 1
+ # Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
+ # Check whether the closing parenthesis is on its own line and also indented two spaces
+ self.CFunctionLayoutCheckFunctionName = 1
+ # Check whether the function prototypes in include files have the same form as function definitions
+ self.CFunctionLayoutCheckFunctionPrototype = 1
+ # Check whether the body of a function is contained by open and close braces that must be in the first column
+ self.CFunctionLayoutCheckFunctionBody = 1
+        # Check whether the data declarations are the first code in a module.
+        self.CFunctionLayoutCheckDataDeclaration = 1
+        # Check that variables are not initialized as part of their declaration
+        self.CFunctionLayoutCheckNoInitOfVariable = 1
+        # Check that STATIC is not used for functions
+        self.CFunctionLayoutCheckNoStatic = 1
+
+ ## Include Files Checking
+ self.IncludeFileCheckAll = 0
+
+        # Check whether there are include files with the same name
+        self.IncludeFileCheckSameName = 1
+        # Check whether all include file contents are guarded by a #ifndef statement.
+        # The #ifndef must be the first line of code following the file header comment;
+        # the #endif must appear on the last line in the file.
+        self.IncludeFileCheckIfndefStatement = 1
+        # Check whether include files contain only public or only private data
+        # Check that include files do NOT contain code or define data variables
+        self.IncludeFileCheckData = 1
+
+ ## Declarations and Data Types Checking
+ self.DeclarationDataTypeCheckAll = 0
+
+        # Check that int, unsigned, char, void and long are not used in any .c, .h or .asl files.
+        self.DeclarationDataTypeCheckNoUseCType = 1
+ # Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
+ self.DeclarationDataTypeCheckInOutModifier = 1
+ # Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
+ self.DeclarationDataTypeCheckEFIAPIModifier = 1
+        # Check whether an enumerated type has a 'typedef' and its name is in capital letters
+        self.DeclarationDataTypeCheckEnumeratedType = 1
+        # Check whether a structure type has a 'typedef' and its name is in capital letters
+        self.DeclarationDataTypeCheckStructureDeclaration = 1
+        # Check whether duplicate structures are defined
+        self.DeclarationDataTypeCheckSameStructure = 1
+        # Check whether a union type has a 'typedef' and its name is in capital letters
+        self.DeclarationDataTypeCheckUnionType = 1
+
+ ## Naming Conventions Checking
+ self.NamingConventionCheckAll = 0
+
+ # Check whether only capital letters are used for #define declarations
+ self.NamingConventionCheckDefineStatement = 1
+ # Check whether only capital letters are used for typedef declarations
+ self.NamingConventionCheckTypedefStatement = 1
+        # Check whether the #ifndef guard at the start of an include file has no prefix underscore and ends with exactly one postfix underscore character, '_'.
+        self.NamingConventionCheckIfndefStatement = 1
+        # Rules for path names, variable names and function names:
+        # 1. First character must be upper case
+        # 2. Must contain at least one lower-case letter
+        # 3. Must not contain spaces
+        # Check whether the path name follows the rules
+        self.NamingConventionCheckPathName = 1
+        # Check whether the variable name follows the rules
+        self.NamingConventionCheckVariableName = 1
+        # Check whether the function name follows the rules
+        self.NamingConventionCheckFunctionName = 1
+        # Check that single-character variable names are not used
+        self.NamingConventionCheckSingleCharacterVariable = 1
+
+ ## Doxygen Checking
+ self.DoxygenCheckAll = 0
+
+        # Check whether the file headers follow the Doxygen special documentation blocks in section 2.3.5
+        self.DoxygenCheckFileHeader = 1
+        # Check whether the function headers follow the Doxygen special documentation blocks in section 2.3.5
+        self.DoxygenCheckFunctionHeader = 1
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.
+ # The brief description must end with a period.
+ self.DoxygenCheckCommentDescription = 1
+        # Check that comment lines in the '///< ... text ...' format, if used, appear after the code section.
+        self.DoxygenCheckCommentFormat = 1
+        # Check that the only Doxygen commands used to mark the code are @bug and @todo.
+        self.DoxygenCheckCommand = 1
+
+ ## Meta-Data File Processing Checking
+ self.MetaDataFileCheckAll = 0
+
+ # Check whether each file defined in meta-data exists
+ self.MetaDataFileCheckPathName = 1
+ # Generate a list for all files defined in meta-data files
+ self.MetaDataFileCheckGenerateFileList = 1
+ # The path of log file
+ self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'
+ # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+ # Each Library Instance must specify the Supported Module Types in its INF file,
+ # and any module specifying the library instance must be one of the supported types.
+ self.MetaDataFileCheckLibraryInstance = 1
+ # Check whether a Library Instance has been defined for all dependent library classes
+ self.MetaDataFileCheckLibraryInstanceDependent = 1
+ # Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
+ self.MetaDataFileCheckLibraryInstanceOrder = 1
+        # Check for unnecessary inclusion of library classes in the INF file
+        self.MetaDataFileCheckLibraryNoUse = 1
+        # Check whether the header files in the Include\Library directory are defined in the package DEC file.
+        self.MetaDataFileCheckLibraryDefinedInDec = 1
+        # Check that an INF file specified in the FDF file but not in the DSC file is for a binary module only
+        self.MetaDataFileCheckBinaryInfInFdf = 1
+        # Do not report errors and warnings related to OS include files such as "windows.h" and "stdio.h"
+ # Check whether a PCD is set in a DSC file or the FDF file, but not in both.
+ self.MetaDataFileCheckPcdDuplicate = 1
+ # Check whether PCD settings in the FDF file can only be related to flash.
+ self.MetaDataFileCheckPcdFlash = 1
+        # Check for PCDs used in INF files but not specified in DSC or FDF files
+        self.MetaDataFileCheckPcdNoUse = 1
+        # Check whether there are duplicate GUIDs defined for Guid/Protocol/Ppi
+        self.MetaDataFileCheckGuidDuplicate = 1
+ # Check whether all files under module directory are described in INF files
+ self.MetaDataFileCheckModuleFileNoUse = 1
+ # Check whether the PCD is correctly used in C function via its type
+ self.MetaDataFileCheckPcdType = 1
+ # Check whether there are FILE_GUID duplication among different INF files
+ self.MetaDataFileCheckModuleFileGuidDuplication = 1
+
+ # Check Guid Format in INF files
+ self.MetaDataFileCheckModuleFileGuidFormat = 1
+ # Check Protocol Format in INF files
+ self.MetaDataFileCheckModuleFileProtocolFormat = 1
+ # Check Ppi Format in INF files
+ self.MetaDataFileCheckModuleFilePpiFormat = 1
+ # Check Pcd Format in INF files
+ self.MetaDataFileCheckModuleFilePcdFormat = 1
+
+ # Check UNI file
+ self.UniCheckAll = 0
+        # Check whether the INF or DEC file defines the localized information in the associated UNI file.
+        self.UniCheckHelpInfo = 1
+        # Check whether the PCD defines the prompt and help in the DEC file and the localized information in the associated UNI file.
+        self.UniCheckPCDInfo = 1
+
+ # Check SMM communication function parameter
+ self.SmmCommParaCheckAll = 0
+ # Check if the EFI_SMM_COMMUNICATION_PROTOCOL parameter buffer type is Reserved / ACPI NVS or UEFI RT code/data
+ self.SmmCommParaCheckBufferType = -1
+
+ #
+ # The check points in this section are reserved
+ #
+ # GotoStatementCheckAll = 0
+ #
+ self.SpellingCheckAll = 0
+
+        # The directories listed here will not be parsed; split with ','
+        self.SkipDirList = []
+
+        # The files listed here will not be parsed; split with ','
+        self.SkipFileList = []
+
+        # A list of binary file extension names
+        self.BinaryExtList = []
+
+        # A list of the only folders to be scanned
+        self.ScanOnlyDirList = []
+
+ # A list for Copyright format
+ self.Copyright = []
+
+ self.TokenReleaceList = []
+
+ self.ParseConfig()
+
+ def ParseConfig(self):
+ Filepath = os.path.normpath(self.Filename)
+ if not os.path.isfile(Filepath):
+ ErrorMsg = "Can't find configuration file '%s'" % Filepath
+ EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)
+
+ LineNo = 0
+ for Line in open(Filepath, 'r'):
+ LineNo = LineNo + 1
+ Line = CleanString(Line)
+ if Line != '':
+ List = GetSplitValueList(Line, TAB_EQUAL_SPLIT)
+ if List[0] not in _ConfigFileToInternalTranslation:
+ ErrorMsg = "Invalid configuration option '%s' was found" % List[0]
+ EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath, Line = LineNo)
+ assert _ConfigFileToInternalTranslation[List[0]] in self.__dict__
+                if List[0] == 'MetaDataFileCheckPathOfGenerateFileList' and List[1] == "":
+                    continue
+                # These options hold comma-separated lists
+                if List[0] in ('ModifierList', 'SkipDirList', 'SkipFileList',
+                               'BinaryExtList', 'Copyright', 'TokenReleaceList'):
+                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
+ self.__dict__[_ConfigFileToInternalTranslation[List[0]]] = List[1]
+
+ def ShowMe(self):
+ print(self.Filename)
+ for Key in self.__dict__.keys():
+ print(Key, '=', self.__dict__[Key])
+
+#
+# Test that our dict and our class still match in contents.
+#
+if __name__ == '__main__':
+    myconfig = Configuration(r"BaseTools\Source\Python\Ecc\config.ini")
+ for each in myconfig.__dict__:
+ if each == "Filename":
+ continue
+ assert each in _ConfigFileToInternalTranslation.values()
+ for each in _ConfigFileToInternalTranslation.values():
+ assert each in myconfig.__dict__
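+    # Editor's illustrative follow-up (not upstream behavior): dump the
+    # effective configuration via the ShowMe helper defined above.
+    myconfig.ShowMe()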
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Database.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Database.py
new file mode 100755
index 00000000..94837005
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Database.py
@@ -0,0 +1,340 @@
+## @file
+# This file is used to create a database used by ECC tool
+#
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import sqlite3
+import Common.LongFilePathOs as os
+import time
+
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+
+from Table.TableDataModel import TableDataModel
+from Table.TableFile import TableFile
+from Table.TableFunction import TableFunction
+from Table.TablePcd import TablePcd
+from Table.TableIdentifier import TableIdentifier
+from Table.TableReport import TableReport
+from Ecc.MetaFileWorkspace.MetaFileTable import ModuleTable
+from Ecc.MetaFileWorkspace.MetaFileTable import PackageTable
+from Ecc.MetaFileWorkspace.MetaFileTable import PlatformTable
+from Table.TableFdf import TableFdf
+
+##
+# Static definitions
+#
+DATABASE_PATH = "Ecc.db"
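+
+# Editor's illustrative sketch (not part of the upstream tool): InsertOneFile
+# below creates one identifier table per C/H source file, named
+# 'Identifier<FileID>'. This hypothetical helper just mirrors that scheme.
+def _DemoIdentifierTableName(FileID):
+    # e.g. _DemoIdentifierTableName(7) -> 'Identifier7'
+    return "Identifier%s" % FileID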
+
+## Database
+#
+# This class defines the ECC database
+# During the phase of initialization, the database will create all tables and
+# insert all records of table DataModel
+#
+# @param object: Inherited from object class
+# @param DbPath: A string for the path of the ECC database
+#
+# @var Conn: Connection of the ECC database
+# @var Cur: Cursor of the connection
+# @var TblDataModel: Local instance for TableDataModel
+#
+class Database(object):
+ def __init__(self, DbPath):
+ self.DbPath = DbPath
+ self.Conn = None
+ self.Cur = None
+ self.TblDataModel = None
+ self.TblFile = None
+ self.TblFunction = None
+ self.TblIdentifier = None
+ self.TblPcd = None
+ self.TblReport = None
+ self.TblInf = None
+ self.TblDec = None
+ self.TblDsc = None
+ self.TblFdf = None
+
+ ## Initialize ECC database
+ #
+ # 1. Delete all old existing tables
+ # 2. Create new tables
+ # 3. Initialize table DataModel
+ #
+ def InitDatabase(self, NewDatabase = True):
+ EdkLogger.verbose("\nInitialize ECC database started ...")
+ #
+ # Drop all old existing tables
+ #
+ if NewDatabase:
+ if os.path.exists(self.DbPath):
+ os.remove(self.DbPath)
+ self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
+ self.Conn.execute("PRAGMA page_size=4096")
+ self.Conn.execute("PRAGMA synchronous=OFF")
+ # to avoid non-ascii character conversion error
+ self.Conn.text_factory = str
+ self.Cur = self.Conn.cursor()
+
+ self.TblDataModel = TableDataModel(self.Cur)
+ self.TblFile = TableFile(self.Cur)
+ self.TblFunction = TableFunction(self.Cur)
+ self.TblIdentifier = TableIdentifier(self.Cur)
+ self.TblPcd = TablePcd(self.Cur)
+ self.TblReport = TableReport(self.Cur)
+ self.TblInf = ModuleTable(self.Cur)
+ self.TblDec = PackageTable(self.Cur)
+ self.TblDsc = PlatformTable(self.Cur)
+ self.TblFdf = TableFdf(self.Cur)
+
+ #
+ # Create new tables
+ #
+ if NewDatabase:
+ self.TblDataModel.Create()
+ self.TblFile.Create()
+ self.TblFunction.Create()
+ self.TblPcd.Create()
+ self.TblReport.Create()
+ self.TblInf.Create()
+ self.TblDec.Create()
+ self.TblDsc.Create()
+ self.TblFdf.Create()
+
+ #
+ # Init each table's ID
+ #
+ self.TblDataModel.InitID()
+ self.TblFile.InitID()
+ self.TblFunction.InitID()
+ self.TblPcd.InitID()
+ self.TblReport.InitID()
+ self.TblInf.InitID()
+ self.TblDec.InitID()
+ self.TblDsc.InitID()
+ self.TblFdf.InitID()
+
+ #
+ # Initialize table DataModel
+ #
+ if NewDatabase:
+ self.TblDataModel.InitTable()
+
+ EdkLogger.verbose("Initialize ECC database ... DONE!")
+
+ ## Query a table
+ #
+ # @param Table: The instance of the table to be queried
+ #
+ def QueryTable(self, Table):
+ Table.Query()
+
+ ## Close entire database
+ #
+ # Commit all first
+ # Close the connection and cursor
+ #
+ def Close(self):
+ #
+ # Commit to file
+ #
+ self.Conn.commit()
+
+ #
+ # Close connection and cursor
+ #
+ self.Cur.close()
+ self.Conn.close()
+
+ ## Insert one file information
+ #
+ # Insert one file's information to the database
+ # 1. Create a record in TableFile
+ # 2. Create functions one by one
+ # 2.1 Create variables of function one by one
+ # 2.2 Create pcds of function one by one
+ # 3. Create variables one by one
+ # 4. Create pcds one by one
+ #
+ def InsertOneFile(self, File):
+ #
+ # Insert a record for file
+ #
+ FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
+
+ if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:
+ IdTable = TableIdentifier(self.Cur)
+ IdTable.Table = "Identifier%s" % FileID
+ IdTable.Create()
+ #
+ # Insert function of file
+ #
+ for Function in File.FunctionList:
+ FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
+ Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
+ Function.BodyStartLine, Function.BodyStartColumn, FileID, \
+ Function.FunNameStartLine, Function.FunNameStartColumn)
+ #
+ # Insert Identifier of function
+ #
+ for Identifier in Function.IdentifierList:
+ IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+ FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+ #
+ # Insert Pcd of function
+ #
+ for Pcd in Function.PcdList:
+ PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
+ FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
+ #
+ # Insert Identifier of file
+ #
+ for Identifier in File.IdentifierList:
+ IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+ FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+ #
+ # Insert Pcd of file
+ #
+ for Pcd in File.PcdList:
+ PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
+ FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
+
+ EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)
+
+    ## UpdateIdentifierBelongsToFunction_disabled
+ #
+ # Update the field "BelongsToFunction" for each Identifier
+ #
+ #
+ def UpdateIdentifierBelongsToFunction_disabled(self):
+ EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
+
+ SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
+ EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+ self.Cur.execute(SqlCommand)
+ Records = self.Cur.fetchall()
+ for Record in Records:
+ IdentifierID = Record[0]
+ BelongsToFile = Record[1]
+ StartLine = Record[2]
+ EndLine = Record[3]
+ Model = Record[4]
+
+ #
+ # Check whether an identifier belongs to a function
+ #
+ EdkLogger.debug(4, "For common identifiers ... ")
+ SqlCommand = """select ID from Function
+ where StartLine < %s and EndLine > %s
+ and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
+ EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+ self.Cur.execute(SqlCommand)
+ IDs = self.Cur.fetchall()
+ for ID in IDs:
+ SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
+ EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+ self.Cur.execute(SqlCommand)
+
+ #
+ # Check whether the identifier is a function header
+ #
+ EdkLogger.debug(4, "For function headers ... ")
+ if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
+ SqlCommand = """select ID from Function
+ where StartLine = %s + 1
+ and BelongsToFile = %s""" % (EndLine, BelongsToFile)
+ EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+ self.Cur.execute(SqlCommand)
+ IDs = self.Cur.fetchall()
+ for ID in IDs:
+ SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
+ EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+ self.Cur.execute(SqlCommand)
+
+ EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
+
+
+ ## UpdateIdentifierBelongsToFunction
+ #
+ # Update the field "BelongsToFunction" for each Identifier
+ #
+ #
+ def UpdateIdentifierBelongsToFunction(self):
+ EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
+
+ SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
+ Records = self.TblFunction.Exec(SqlCommand)
+ Data1 = []
+ Data2 = []
+ for Record in Records:
+ FunctionID = Record[0]
+ BelongsToFile = Record[1]
+ StartLine = Record[2]
+ EndLine = Record[3]
+ #Data1.append(("'file%s'" % BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine))
+ #Data2.append(("'file%s'" % BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1))
+
+ SqlCommand = """Update Identifier%s set BelongsToFunction = %s where BelongsToFile = %s and StartLine > %s and EndLine < %s""" % \
+ (BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine)
+ self.TblIdentifier.Exec(SqlCommand)
+
+ SqlCommand = """Update Identifier%s set BelongsToFunction = %s, Model = %s where BelongsToFile = %s and Model = %s and EndLine = %s""" % \
+ (BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1)
+ self.TblIdentifier.Exec(SqlCommand)
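+            # For illustration: for a function with ID 7 spanning lines 21-60 of
+            # file 1, the first statement above expands to (IDs hypothetical):
+            #   Update Identifier1 set BelongsToFunction = 7
+            #       where BelongsToFile = 1 and StartLine > 21 and EndLine < 60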
+# #
+# # Check whether an identifier belongs to a function
+# #
+# print Data1
+# SqlCommand = """Update ? set BelongsToFunction = ? where BelongsToFile = ? and StartLine > ? and EndLine < ?"""
+# print SqlCommand
+# EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+# self.Cur.executemany(SqlCommand, Data1)
+#
+# #
+# # Check whether the identifier is a function header
+# #
+# EdkLogger.debug(4, "For function headers ... ")
+# SqlCommand = """Update ? set BelongsToFunction = ?, Model = ? where BelongsToFile = ? and Model = ? and EndLine = ?"""
+# EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+# self.Cur.executemany(SqlCommand, Data2)
+#
+# EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
+
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ #EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+ EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))
+
+ Db = Database(DATABASE_PATH)
+ Db.InitDatabase()
+ Db.QueryTable(Db.TblDataModel)
+
+ identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
+ identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
+ identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
+ identifier4 = DataClass.IdentifierClass(-1, '', '', "i1'", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 77, 43, 88, 43)
+ fun1 = DataClass.FunctionClass(-1, '', '', 'fun1', '', 21, 2, 60, 45, 1, 23, 0, [], [])
+ file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
+ Db.InsertOneFile(file)
+ Db.UpdateIdentifierBelongsToFunction()
+
+ Db.QueryTable(Db.TblFile)
+ Db.QueryTable(Db.TblFunction)
+ Db.QueryTable(Db.TblPcd)
+ Db.QueryTable(Db.TblIdentifier)
+
+ Db.Close()
+ EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccGlobalData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccGlobalData.py
new file mode 100644
index 00000000..e78398b3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccGlobalData.py
@@ -0,0 +1,21 @@
+## @file
+# This file is used to save global data used by the ECC tool
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+import Common.LongFilePathOs as os
+
+gWorkspace = ''
+gTarget = ''
+gConfig = None
+gDb = None
+gIdentifierTableList = []
+gCFileList = []
+gHFileList = []
+gUFileList = []
+gException = None
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccMain.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccMain.py
new file mode 100755
index 00000000..9190a5ea
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccMain.py
@@ -0,0 +1,415 @@
+## @file
+# This file is the main entry point of the ECC tool
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) Microsoft Corporation.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os, time, glob, sys
+import Common.EdkLogger as EdkLogger
+from Ecc import Database
+from Ecc import EccGlobalData
+from Ecc.MetaDataParser import *
+from optparse import OptionParser
+from Ecc.Configuration import Configuration
+from Ecc.Check import Check
+import Common.GlobalData as GlobalData
+
+from Common.StringUtils import NormPath
+from Common.BuildVersion import gBUILD_VERSION
+from Common import BuildToolError
+from Common.Misc import PathClass
+from Common.Misc import DirCache
+from Ecc.MetaFileWorkspace.MetaFileParser import DscParser
+from Ecc.MetaFileWorkspace.MetaFileParser import DecParser
+from Ecc.MetaFileWorkspace.MetaFileParser import InfParser
+from Ecc.MetaFileWorkspace.MetaFileParser import Fdf
+from Ecc.MetaFileWorkspace.MetaFileTable import MetaFileStorage
+from Ecc import c
+import re, string
+from Ecc.Exception import *
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## Ecc
+#
+# This class defines the ECC main entry point
+#
+# @param object: Inherited from object class
+#
+class Ecc(object):
+ def __init__(self):
+ # Version and Copyright
+ self.VersionNumber = ("1.0" + " Build " + gBUILD_VERSION)
+ self.Version = "%prog Version " + self.VersionNumber
+ self.Copyright = "Copyright (c) 2009 - 2018, Intel Corporation All rights reserved."
+
+ self.InitDefaultConfigIni()
+ self.OutputFile = 'output.txt'
+ self.ReportFile = 'Report.csv'
+ self.ExceptionFile = 'exception.xml'
+ self.IsInit = True
+ self.ScanSourceCode = True
+ self.ScanMetaData = True
+ self.MetaFile = ''
+ self.OnlyScan = None
+
+ # Parse the options and args
+ self.ParseOption()
+ EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
+
+ WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
+ os.environ["WORKSPACE"] = WorkspaceDir
+
+ # set multiple workspace
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(WorkspaceDir, PackagesPath)
+
+ GlobalData.gWorkspace = WorkspaceDir
+
+ GlobalData.gGlobalDefines["WORKSPACE"] = WorkspaceDir
+
+ EdkLogger.info("Loading ECC configuration ... done")
+ # Generate checkpoints list
+ EccGlobalData.gConfig = Configuration(self.ConfigFile)
+
+ # Generate exception list
+ EccGlobalData.gException = ExceptionCheck(self.ExceptionFile)
+
+ # Init Ecc database
+ EccGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
+ EccGlobalData.gDb.InitDatabase(self.IsInit)
+
+ #
+ # Get files real name in workspace dir
+ #
+ GlobalData.gAllFiles = DirCache(GlobalData.gWorkspace)
+
+ # Build ECC database
+# self.BuildDatabase()
+ self.DetectOnlyScanDirs()
+
+ # Start to check
+ self.Check()
+
+ # Show report
+ self.GenReport()
+
+ # Close Database
+ EccGlobalData.gDb.Close()
+
+ def InitDefaultConfigIni(self):
+ paths = map(lambda p: os.path.join(p, 'Ecc', 'config.ini'), sys.path)
+ paths = (os.path.realpath('config.ini'),) + tuple(paths)
+ for path in paths:
+ if os.path.exists(path):
+ self.ConfigFile = path
+ return
+ self.ConfigFile = 'config.ini'
+
+
+    ## DetectOnlyScanDirs
+    #
+    # Detect whether scanning is restricted to the folders listed in config.ini
+ #
+ def DetectOnlyScanDirs(self):
+ if self.OnlyScan == True:
+ OnlyScanDirs = []
+            # Use a regex here in case multiple spaces or TABs exist in the ScanOnlyDirList entry of the config.ini file
+ for folder in re.finditer(r'\S+', EccGlobalData.gConfig.ScanOnlyDirList):
+ OnlyScanDirs.append(folder.group())
+ if len(OnlyScanDirs) != 0:
+ self.BuildDatabase(OnlyScanDirs)
+ else:
+ EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Use -f option need to fill specific folders in config.ini file")
+ else:
+ self.BuildDatabase()
+
+
+ ## BuildDatabase
+ #
+ # Build the database for target
+ #
+ def BuildDatabase(self, SpeciDirs = None):
+ # Clean report table
+ EccGlobalData.gDb.TblReport.Drop()
+ EccGlobalData.gDb.TblReport.Create()
+
+ # Build database
+ if self.IsInit:
+ if self.ScanMetaData:
+ EdkLogger.quiet("Building database for Meta Data File ...")
+ self.BuildMetaDataFileDatabase(SpeciDirs)
+ if self.ScanSourceCode:
+ EdkLogger.quiet("Building database for Meta Data File Done!")
+ if SpeciDirs is None:
+ c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)
+ else:
+ for specificDir in SpeciDirs:
+ c.CollectSourceCodeDataIntoDB(os.path.join(EccGlobalData.gTarget, specificDir))
+
+ EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
+ EccGlobalData.gCFileList = GetFileList(MODEL_FILE_C, EccGlobalData.gDb)
+ EccGlobalData.gHFileList = GetFileList(MODEL_FILE_H, EccGlobalData.gDb)
+ EccGlobalData.gUFileList = GetFileList(MODEL_FILE_UNI, EccGlobalData.gDb)
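+        # At this point (names as assigned above) EccGlobalData holds, for example,
+        # per-file identifier table names such as 'Identifier3' (the numeric suffix
+        # is a file ID and purely illustrative) plus the full paths of all parsed
+        # .c, .h and .uni files.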
+
+ ## BuildMetaDataFileDatabase
+ #
+ # Build the database for meta data files
+ #
+ def BuildMetaDataFileDatabase(self, SpecificDirs = None):
+ ScanFolders = []
+ if SpecificDirs is None:
+ ScanFolders.append(EccGlobalData.gTarget)
+ else:
+ for specificDir in SpecificDirs:
+ ScanFolders.append(os.path.join(EccGlobalData.gTarget, specificDir))
+ EdkLogger.quiet("Building database for meta data files ...")
+ Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
+ #SkipDirs = Read from config file
+ SkipDirs = EccGlobalData.gConfig.SkipDirList
+ SkipDirString = '|'.join(SkipDirs)
+        p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
+ for scanFolder in ScanFolders:
+ for Root, Dirs, Files in os.walk(scanFolder):
+ if p.match(Root.upper()):
+ continue
+ for Dir in Dirs:
+ Dirname = os.path.join(Root, Dir)
+ if os.path.islink(Dirname):
+ Dirname = os.path.realpath(Dirname)
+ if os.path.isdir(Dirname):
+ # symlinks to directories are treated as directories
+ Dirs.remove(Dir)
+ Dirs.append(Dirname)
+
+ for File in Files:
+ if len(File) > 4 and File[-4:].upper() == ".DEC":
+ Filename = os.path.normpath(os.path.join(Root, File))
+ EdkLogger.quiet("Parsing %s" % Filename)
+ Op.write("%s\r" % Filename)
+ #Dec(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
+ self.MetaFile = DecParser(Filename, MODEL_FILE_DEC, EccGlobalData.gDb.TblDec)
+ self.MetaFile.Start()
+ continue
+ if len(File) > 4 and File[-4:].upper() == ".DSC":
+ Filename = os.path.normpath(os.path.join(Root, File))
+ EdkLogger.quiet("Parsing %s" % Filename)
+ Op.write("%s\r" % Filename)
+ #Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
+ self.MetaFile = DscParser(PathClass(Filename, Root), MODEL_FILE_DSC, MetaFileStorage(EccGlobalData.gDb.TblDsc.Cur, Filename, MODEL_FILE_DSC, True))
+                        # always do post-processing, in case macros have changed
+ self.MetaFile.DoPostProcess()
+ self.MetaFile.Start()
+ self.MetaFile._PostProcess()
+ continue
+ if len(File) > 4 and File[-4:].upper() == ".INF":
+ Filename = os.path.normpath(os.path.join(Root, File))
+ EdkLogger.quiet("Parsing %s" % Filename)
+ Op.write("%s\r" % Filename)
+ #Inf(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
+ self.MetaFile = InfParser(Filename, MODEL_FILE_INF, EccGlobalData.gDb.TblInf)
+ self.MetaFile.Start()
+ continue
+ if len(File) > 4 and File[-4:].upper() == ".FDF":
+ Filename = os.path.normpath(os.path.join(Root, File))
+ EdkLogger.quiet("Parsing %s" % Filename)
+ Op.write("%s\r" % Filename)
+ Fdf(Filename, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
+ continue
+ if len(File) > 4 and File[-4:].upper() == ".UNI":
+ Filename = os.path.normpath(os.path.join(Root, File))
+ EdkLogger.quiet("Parsing %s" % Filename)
+ Op.write("%s\r" % Filename)
+ FileID = EccGlobalData.gDb.TblFile.InsertFile(Filename, MODEL_FILE_UNI)
+ EccGlobalData.gDb.TblReport.UpdateBelongsToItemByFile(FileID, File)
+ continue
+
+ Op.close()
+
+ # Commit to database
+ EccGlobalData.gDb.Conn.commit()
+
+ EdkLogger.quiet("Building database for meta data files done!")
+
+ ##
+ #
+ # Check each checkpoint
+ #
+ def Check(self):
+ EdkLogger.quiet("Checking ...")
+ EccCheck = Check()
+ EccCheck.Check()
+ EdkLogger.quiet("Checking done!")
+
+ ##
+ #
+ # Generate the scan report
+ #
+ def GenReport(self):
+ EdkLogger.quiet("Generating report ...")
+ EccGlobalData.gDb.TblReport.ToCSV(self.ReportFile)
+ EdkLogger.quiet("Generating report done!")
+
+ def GetRealPathCase(self, path):
+ TmpPath = path.rstrip(os.sep)
+ PathParts = TmpPath.split(os.sep)
+ if len(PathParts) == 0:
+ return path
+ if len(PathParts) == 1:
+ if PathParts[0].strip().endswith(':'):
+ return PathParts[0].upper()
+            # Relative path: list the current directory
+ Dirs = os.listdir('.')
+ for Dir in Dirs:
+ if Dir.upper() == PathParts[0].upper():
+ return Dir
+
+ if PathParts[0].strip().endswith(':'):
+ PathParts[0] = PathParts[0].upper()
+ ParentDir = PathParts[0]
+ RealPath = ParentDir
+ if PathParts[0] == '':
+ RealPath = os.sep
+ ParentDir = os.sep
+
+ PathParts.remove(PathParts[0]) # need to remove the parent
+ for Part in PathParts:
+ Dirs = os.listdir(ParentDir + os.sep)
+ for Dir in Dirs:
+ if Dir.upper() == Part.upper():
+ RealPath += os.sep
+ RealPath += Dir
+ break
+ ParentDir += os.sep
+ ParentDir += Dir
+
+ return RealPath
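+    # A rough example, assuming the path exists on disk with this casing:
+    #   GetRealPathCase('c:\\edk2\\mdepkg') -> 'C:\\Edk2\\MdePkg'
+    # The drive letter is upper-cased and each component is replaced by the
+    # casing reported by os.listdir().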
+
+ ## ParseOption
+ #
+ # Parse options
+ #
+ def ParseOption(self):
+ (Options, Target) = self.EccOptionParser()
+
+ if Options.Workspace:
+ os.environ["WORKSPACE"] = Options.Workspace
+
+ # Check workspace environment
+ if "WORKSPACE" not in os.environ:
+ EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
+ ExtraData="WORKSPACE")
+ else:
+ EccGlobalData.gWorkspace = os.path.normpath(os.getenv("WORKSPACE"))
+ if not os.path.exists(EccGlobalData.gWorkspace):
+ EdkLogger.error("ECC", BuildToolError.FILE_NOT_FOUND, ExtraData="WORKSPACE = %s" % EccGlobalData.gWorkspace)
+ os.environ["WORKSPACE"] = EccGlobalData.gWorkspace
+ # Set log level
+ self.SetLogLevel(Options)
+
+ # Set other options
+ if Options.ConfigFile is not None:
+ self.ConfigFile = Options.ConfigFile
+ if Options.OutputFile is not None:
+ self.OutputFile = Options.OutputFile
+ if Options.ReportFile is not None:
+ self.ReportFile = Options.ReportFile
+ if Options.ExceptionFile is not None:
+ self.ExceptionFile = Options.ExceptionFile
+ if Options.Target is not None:
+ if not os.path.isdir(Options.Target):
+ EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
+ else:
+ EccGlobalData.gTarget = self.GetRealPathCase(os.path.normpath(Options.Target))
+ else:
+ EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")
+ EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))
+ if Options.keepdatabase is not None:
+ self.IsInit = False
+ if Options.metadata is not None and Options.sourcecode is not None:
+ EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")
+ if Options.metadata is not None:
+ self.ScanSourceCode = False
+ if Options.sourcecode is not None:
+ self.ScanMetaData = False
+ if Options.folders is not None:
+ self.OnlyScan = True
+
+ ## SetLogLevel
+ #
+ # Set current log level of the tool based on args
+ #
+ # @param Option: The option list including log level setting
+ #
+ def SetLogLevel(self, Option):
+ if Option.verbose is not None:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Option.quiet is not None:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Option.debug is not None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ ## Parse command line options
+ #
+ # Using standard Python module optparse to parse command line option of this tool.
+ #
+ # @retval Opt A optparse.Values object containing the parsed options
+ # @retval Args Target of build command
+ #
+ def EccOptionParser(self):
+ Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Ecc.exe", usage = "%prog [options]")
+ Parser.add_option("-t", "--target sourcepath", action="store", type="string", dest='Target',
+ help="Check all files under the target workspace.")
+ Parser.add_option("-c", "--config filename", action="store", type="string", dest="ConfigFile",
+ help="Specify a configuration file. Defaultly use config.ini under ECC tool directory.")
+ Parser.add_option("-o", "--outfile filename", action="store", type="string", dest="OutputFile",
+ help="Specify the name of an output file, if and only if one filename was specified.")
+ Parser.add_option("-r", "--reportfile filename", action="store", type="string", dest="ReportFile",
+ help="Specify the name of an report file, if and only if one filename was specified.")
+ Parser.add_option("-e", "--exceptionfile filename", action="store", type="string", dest="ExceptionFile",
+ help="Specify the name of an exception file, if and only if one filename was specified.")
+ Parser.add_option("-m", "--metadata", action="store_true", type=None, help="Only scan meta-data files information if this option is specified.")
+ Parser.add_option("-s", "--sourcecode", action="store_true", type=None, help="Only scan source code files information if this option is specified.")
+ Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Ecc database will not be cleaned except report information if this option is specified.")
+ Parser.add_option("-l", "--log filename", action="store", dest="LogFile", help="""If specified, the tool should emit the changes that
+ were made by the tool after printing the result message.
+ If filename, the emit to the file, otherwise emit to
+ standard output. If no modifications were made, then do not
+ create a log file, or output a log message.""")
+ Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
+ Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
+ "including library instances selected, final dependency expression, "\
+ "and warning messages, etc.")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-w", "--workspace", action="store", type="string", dest='Workspace', help="Specify workspace.")
+ Parser.add_option("-f", "--folders", action="store_true", type=None, help="Only scanning specified folders which are recorded in config.ini file.")
+
+ (Opt, Args)=Parser.parse_args()
+
+ return (Opt, Args)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ # Initialize log system
+ EdkLogger.Initialize()
+ EdkLogger.IsRaiseError = False
+
+ StartTime = time.perf_counter()
+ Ecc = Ecc()
+ FinishTime = time.perf_counter()
+
+ BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
+ EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccToolError.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccToolError.py
new file mode 100644
index 00000000..7765f0e5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/EccToolError.py
@@ -0,0 +1,205 @@
+## @file
+# Standardized Error Handling infrastructure.
+#
+# Copyright (c) 2021, Arm Limited. All rights reserved.<BR>
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+ERROR_GENERAL_CHECK_ALL = 1000
+ERROR_GENERAL_CHECK_NO_TAB = 1001
+ERROR_GENERAL_CHECK_INDENTATION = 1002
+ERROR_GENERAL_CHECK_LINE = 1003
+ERROR_GENERAL_CHECK_NO_ASM = 1004
+ERROR_GENERAL_CHECK_NO_PROGMA = 1005
+ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
+ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
+ERROR_GENERAL_CHECK_NON_ACSII = 1008
+ERROR_GENERAL_CHECK_UNI = 1009
+ERROR_GENERAL_CHECK_UNI_HELP_INFO = 1010
+ERROR_GENERAL_CHECK_INVALID_LINE_ENDING = 1011
+ERROR_GENERAL_CHECK_TRAILING_WHITE_SPACE_LINE = 1012
+
+ERROR_SPACE_CHECK_ALL = 2000
+
+ERROR_PREDICATE_EXPRESSION_CHECK_ALL = 3000
+ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE = 3001
+ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR = 3002
+ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE = 3003
+
+ERROR_HEADER_CHECK_ALL = 4000
+ERROR_HEADER_CHECK_FILE = 4001
+ERROR_HEADER_CHECK_FUNCTION = 4002
+
+ERROR_C_FUNCTION_LAYOUT_CHECK_ALL = 5000
+ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE = 5001
+ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER = 5002
+ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME = 5003
+ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE = 5004
+ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY = 5005
+ERROR_C_FUNCTION_LAYOUT_CHECK_DATA_DECLARATION = 5006
+ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE = 5007
+ERROR_C_FUNCTION_LAYOUT_CHECK_NO_STATIC = 5008
+ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2 = 5009
+ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3 = 5010
+
+ERROR_INCLUDE_FILE_CHECK_ALL = 6000
+ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1 = 6001
+ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 = 6002
+ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 = 6003
+ERROR_INCLUDE_FILE_CHECK_DATA = 6004
+ERROR_INCLUDE_FILE_CHECK_NAME = 6005
+
+ERROR_DECLARATION_DATA_TYPE_CHECK_ALL = 7000
+ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE = 7001
+ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER = 7002
+ERROR_DECLARATION_DATA_TYPE_CHECK_EFI_API_MODIFIER = 7003
+ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE = 7004
+ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION = 7005
+ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE = 7007
+ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE = 7006
+ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE = 7008
+
+ERROR_NAMING_CONVENTION_CHECK_ALL = 8000
+ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT = 8001
+ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT = 8002
+ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT = 8003
+ERROR_NAMING_CONVENTION_CHECK_PATH_NAME = 8004
+ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME = 8005
+ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME = 8006
+ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE = 8007
+
+ERROR_DOXYGEN_CHECK_ALL = 9000
+ERROR_DOXYGEN_CHECK_FILE_HEADER = 9001
+ERROR_DOXYGEN_CHECK_FUNCTION_HEADER = 9002
+ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION = 9003
+ERROR_DOXYGEN_CHECK_COMMENT_FORMAT = 9004
+ERROR_DOXYGEN_CHECK_COMMAND = 9005
+
+ERROR_META_DATA_FILE_CHECK_ALL = 10000
+ERROR_META_DATA_FILE_CHECK_PATH_NAME = 10001
+ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1 = 10002
+ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2 = 10003
+ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT = 10004
+ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_ORDER = 10005
+ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE = 10006
+ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF = 10007
+ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE = 10008
+ERROR_META_DATA_FILE_CHECK_PCD_FLASH = 10009
+ERROR_META_DATA_FILE_CHECK_PCD_NO_USE = 10010
+ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID = 10011
+ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL = 10012
+ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI = 10013
+ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE = 10014
+ERROR_META_DATA_FILE_CHECK_PCD_TYPE = 10015
+ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION = 10016
+ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE = 10017
+ERROR_META_DATA_FILE_CHECK_FORMAT_GUID = 10018
+ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL = 10019
+ERROR_META_DATA_FILE_CHECK_FORMAT_PPI = 10020
+ERROR_META_DATA_FILE_CHECK_FORMAT_PCD = 10021
+ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED = 10022
+
+ERROR_SPELLING_CHECK_ALL = 11000
+
+ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE = 12001
+
+gEccErrorMessage = {
+ ERROR_GENERAL_CHECK_ALL : "",
+ ERROR_GENERAL_CHECK_NO_TAB : "'TAB' character is not allowed in source code, please replace each 'TAB' with two spaces.",
+ ERROR_GENERAL_CHECK_INDENTATION : "Indentation does not follow coding style",
+ ERROR_GENERAL_CHECK_LINE : "The width of each line does not follow coding style",
+ ERROR_GENERAL_CHECK_NO_ASM : "There should be no use of _asm in the source file",
+    ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#pragma" in a source file except "#pragma pack(#)\"""",
+ ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
+ ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
+    ERROR_GENERAL_CHECK_NON_ACSII : "File contains an invalid non-ASCII character",
+    ERROR_GENERAL_CHECK_UNI : "File is not a valid UTF-16 UNI file",
+    ERROR_GENERAL_CHECK_UNI_HELP_INFO : "A UNI file associated with an INF or DEC file must define the prompt and help information.",
+    ERROR_GENERAL_CHECK_INVALID_LINE_ENDING : "Only CRLF (Carriage Return Line Feed) line endings are allowed.",
+    ERROR_GENERAL_CHECK_TRAILING_WHITE_SPACE_LINE : "There should be no trailing white space at the end of a line.",
+
+ ERROR_SPACE_CHECK_ALL : "",
+
+ ERROR_PREDICATE_EXPRESSION_CHECK_ALL : "",
+ ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE : "Boolean values and variable type BOOLEAN should not use explicit comparisons to TRUE or FALSE",
+    ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR : "Non-Boolean comparisons should use a compare operator (==, !=, >, <, >=, <=)",
+ ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE : "A comparison of any pointer to zero must be done via the NULL type",
+
+ ERROR_HEADER_CHECK_ALL : "",
+ ERROR_HEADER_CHECK_FILE : "File header doesn't exist",
+ ERROR_HEADER_CHECK_FUNCTION : "Function header doesn't exist",
+
+ ERROR_C_FUNCTION_LAYOUT_CHECK_ALL : "",
+    ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE : "Return type of a function should exist and be on the first line",
+    ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER : "Any optional functional modifiers should exist and be next to the return type",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME : """Function name should be left justified, followed by the beginning of the parameter list, with the closing parenthesis on its own line, indented two spaces""",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE : "Function prototypes in include files have the same form as function definitions",
+    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2 : "Function prototypes in include files have a different parameter count than the function definitions",
+    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3 : "Function prototypes in include files have different parameter modifiers than the function definitions",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY : "The body of a function should be contained by open and close braces that must be in the first column",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_DATA_DECLARATION : "The data declarations should be the first code in a module",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE : "There should be no initialization of a variable as part of its declaration",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_NO_STATIC : "There should be no use of STATIC for functions",
+
+ ERROR_INCLUDE_FILE_CHECK_ALL : "",
+ ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1 : "All include file contents should be guarded by a #ifndef statement.",
+ ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 : "The #ifndef must be the first line of code following the file header comment",
+ ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 : "The #endif must appear on the last line in the file",
+ ERROR_INCLUDE_FILE_CHECK_DATA : "Include files should contain only public or only private data and cannot contain code or define data variables",
+    ERROR_INCLUDE_FILE_CHECK_NAME : "Include files with the same name are not permitted",
+
+ ERROR_DECLARATION_DATA_TYPE_CHECK_ALL : "",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE : "There should be no use of int, unsigned, char, void, long in any .c, .h or .asl files",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER : """The modifiers IN, OUT, OPTIONAL, and UNALIGNED should be used only to qualify arguments to a function and should not appear in a data type declaration""",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_EFI_API_MODIFIER : "The EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE : "Enumerated Type should have a 'typedef' and the name must be in capital letters",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION : "Structure Type should have a 'typedef' and the name must be in capital letters",
+    ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE : "Structures with the same name are not permitted",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE : "Union Type should have a 'typedef' and the name must be in capital letters",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE : "Complex types should be typedef-ed",
+
+ ERROR_NAMING_CONVENTION_CHECK_ALL : "",
+ ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT : "Only capital letters are allowed to be used for #define declarations",
+ ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT : "Only capital letters are allowed to be used for typedef declarations",
+ ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT : "The #ifndef at the start of an include file should have one postfix underscore, and no prefix underscore character '_'",
+ ERROR_NAMING_CONVENTION_CHECK_PATH_NAME : """Path name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters""",
+ ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME : """Variable name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters 4. Global variable name must start with a 'g'""",
+ ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME : """Function name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters""",
+ ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE : "There should be no use of short (single character) variable names",
+
+ ERROR_DOXYGEN_CHECK_ALL : "",
+ ERROR_DOXYGEN_CHECK_FILE_HEADER : "The file headers should follow Doxygen special documentation blocks in section 2.3.5",
+ ERROR_DOXYGEN_CHECK_FUNCTION_HEADER : "The function headers should follow Doxygen special documentation blocks in section 2.3.5",
+ ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION : """The first line of text in a comment block should be a brief description of the element being documented and the brief description must end with a period.""",
+ ERROR_DOXYGEN_CHECK_COMMENT_FORMAT : "For comment line with '///< ... text ...' format, if it is used, it should be after the code section",
+ ERROR_DOXYGEN_CHECK_COMMAND : "Only Doxygen commands '@bug', '@todo', '@example', '@file', '@attention', '@param', '@post', '@pre', '@retval', '@return', '@sa', '@since', '@test', '@note', '@par', '@endcode', '@code', '@{', '@}' are allowed to mark the code",
+
+ ERROR_META_DATA_FILE_CHECK_ALL : "",
+ ERROR_META_DATA_FILE_CHECK_PATH_NAME : "The file defined in meta-data does not exist",
+    ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1 : "A library instance defined for a given module (or dependent library instance) doesn't match the module's type.",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2 : "A library instance must specify the Supported Module Types in its INF file",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT : "A library instance must be defined for all dependent library classes",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_ORDER : "The library Instances specified by the LibraryClasses sections should be listed in order of dependencies",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE : "There should be no unnecessary inclusion of library classes in the INF file",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE : "Duplicate Library Class Name found",
+ ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF : "An INF file is specified in the FDF file, but not in the DSC file, therefore the INF file must be for a Binary module only",
+ ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE : "Duplicate PCDs found",
+ ERROR_META_DATA_FILE_CHECK_PCD_FLASH : "PCD settings in the FDF file should only be related to flash",
+    ERROR_META_DATA_FILE_CHECK_PCD_NO_USE : "There should be no PCDs declared in INF files that are not specified in either a DSC or FDF file",
+ ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID : "Duplicate GUID found",
+ ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL : "Duplicate PROTOCOL found",
+ ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI : "Duplicate PPI found",
+    ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE : "Unused module files found",
+ ERROR_META_DATA_FILE_CHECK_PCD_TYPE : "Wrong C code function used for this kind of PCD",
+ ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION : "Module file has FILE_GUID collision with other module file",
+ ERROR_META_DATA_FILE_CHECK_FORMAT_GUID : "Wrong GUID Format used in Module file",
+ ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL : "Wrong Protocol Format used in Module file",
+ ERROR_META_DATA_FILE_CHECK_FORMAT_PPI : "Wrong Ppi Format used in Module file",
+ ERROR_META_DATA_FILE_CHECK_FORMAT_PCD : "Wrong Pcd Format used in Module file",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED : "Not defined LibraryClass used in the Module file.",
+ ERROR_SPELLING_CHECK_ALL : "",
+
+ ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE : "SMM communication function may use wrong parameter type",
+ }
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Exception.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Exception.py
new file mode 100755
index 00000000..1c2320fe
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Exception.py
@@ -0,0 +1,83 @@
+## @file
+# This file is used to parse exception items found by ECC tool
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from Ecc.Xml.XmlRoutines import *
+import Common.LongFilePathOs as os
+
+# ExceptionXml to parse Exception Node of XML file
+class ExceptionXml(object):
+ def __init__(self):
+ self.KeyWord = ''
+ self.ErrorID = ''
+ self.FilePath = ''
+
+ def FromXml(self, Item, Key):
+ self.KeyWord = XmlElement(Item, '%s/KeyWord' % Key)
+ self.ErrorID = XmlElement(Item, '%s/ErrorID' % Key)
+ self.FilePath = os.path.normpath(XmlElement(Item, '%s/FilePath' % Key))
+
+ def __str__(self):
+ return 'ErrorID = %s KeyWord = %s FilePath = %s' %(self.ErrorID, self.KeyWord, self.FilePath)
+
+# ExceptionListXml to parse Exception Node List of XML file
+class ExceptionListXml(object):
+ def __init__(self):
+ self.List = []
+
+ def FromXmlFile(self, FilePath):
+ XmlContent = XmlParseFile(FilePath)
+ for Item in XmlList(XmlContent, '/ExceptionList/Exception'):
+ Exp = ExceptionXml()
+ Exp.FromXml(Item, 'Exception')
+ self.List.append(Exp)
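+    # A sketch of the XML shape this method expects, inferred from the element
+    # paths queried above (the element values here are hypothetical):
+    #
+    #   <ExceptionList>
+    #     <Exception>
+    #       <KeyWord>gExampleKeyword</KeyWord>
+    #       <ErrorID>4002</ErrorID>
+    #       <FilePath>Some/File/Path.c</FilePath>
+    #     </Exception>
+    #   </ExceptionList>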
+
+ def ToList(self):
+ RtnList = []
+ for Item in self.List:
+ #RtnList.append((Item.ErrorID, Item.KeyWord, Item.FilePath))
+ RtnList.append((Item.ErrorID, Item.KeyWord))
+
+ return RtnList
+
+ def __str__(self):
+ RtnStr = ''
+ if self.List:
+ for Item in self.List:
+ RtnStr = RtnStr + str(Item) + '\n'
+ return RtnStr
+
+# A class to check exception
+class ExceptionCheck(object):
+ def __init__(self, FilePath = None):
+ self.ExceptionList = []
+ self.ExceptionListXml = ExceptionListXml()
+ self.LoadExceptionListXml(FilePath)
+
+ def LoadExceptionListXml(self, FilePath):
+ if FilePath and os.path.isfile(FilePath):
+ self.ExceptionListXml.FromXmlFile(FilePath)
+ self.ExceptionList = self.ExceptionListXml.ToList()
+
+ def IsException(self, ErrorID, KeyWord, FileID=-1):
+ if (str(ErrorID), KeyWord.replace('\r\n', '\n')) in self.ExceptionList:
+ return True
+ else:
+ return False
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ El = ExceptionCheck('C:\\Hess\\Project\\BuildTool\\src\\Ecc\\exception.xml')
+ print(El.ExceptionList)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/FileProfile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/FileProfile.py
new file mode 100755
index 00000000..0a53d94f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/FileProfile.py
@@ -0,0 +1,51 @@
+## @file
+# fragments of source file
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+
+from __future__ import absolute_import
+import re
+import Common.LongFilePathOs as os
+from Ecc.ParserWarning import Warning
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+CommentList = []
+PPDirectiveList = []
+PredicateExpressionList = []
+FunctionDefinitionList = []
+VariableDeclarationList = []
+EnumerationDefinitionList = []
+StructUnionDefinitionList = []
+TypedefDefinitionList = []
+FunctionCallingList = []
+
+## record file data when parsing source
+#
+# May raise Exception when opening file.
+#
+class FileProfile :
+
+ ## The constructor
+ #
+ # @param self The object pointer
+    #   @param  FileName    The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.FileLinesList = []
+ self.FileLinesListFromFile = []
+ try:
+ fsock = open(FileName, "r")
+ try:
+ self.FileLinesListFromFile = fsock.readlines()
+ finally:
+ fsock.close()
+
+ except IOError:
+ raise Warning("Error when opening file %s" % FileName)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaDataParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaDataParser.py
new file mode 100755
index 00000000..ed266819
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaDataParser.py
@@ -0,0 +1,271 @@
+## @file
+# This file is used to define common parser functions for meta-data
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+from CommonDataClass.DataClass import *
+from Ecc.EccToolError import *
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Ecc import EccGlobalData
+import re
+## Get the include path list for a source file
+#
+# 1. Find which INF file the source file belongs to
+# 2. Find the inf's package
+# 3. Return the include path list of the package
+#
+def GetIncludeListOfFile(WorkSpace, Filepath, Db):
+ IncludeList = []
+ Filepath = os.path.normpath(Filepath)
+ SqlCommand = """
+ select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
+ select distinct B.BelongsToFile from File as A left join Inf as B
+ where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
+ and Inf.BelongsToFile = File.ID""" \
+ % (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
+ RecordSet = Db.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ DecFullPath = os.path.normpath(mws.join(WorkSpace, Record[0]))
+ InfFullPath = os.path.normpath(mws.join(WorkSpace, Record[1]))
+ (DecPath, DecName) = os.path.split(DecFullPath)
+ (InfPath, InfName) = os.path.split(InfFullPath)
+ SqlCommand = """select Value1 from Dec where BelongsToFile =
+ (select ID from File where FullPath = '%s') and Model = %s""" \
+ % (DecFullPath, MODEL_EFI_INCLUDE)
+ NewRecordSet = Db.TblDec.Exec(SqlCommand)
+ if InfPath not in IncludeList:
+ IncludeList.append(InfPath)
+ for NewRecord in NewRecordSet:
+ IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
+ if IncludePath not in IncludeList:
+ IncludeList.append(IncludePath)
+
+ return IncludeList
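+# An illustration with hypothetical paths: for Filepath 'MdePkg\\Library\\Foo\\Foo.c',
+# listed as a source file of 'Foo.inf', the result contains Foo.inf's directory
+# plus every [Includes] path declared by the package(s) that INF belongs to.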
+
+## Get the file list
+#
+# Search the File table and find all files of the given type
+#
+def GetFileList(FileModel, Db):
+ FileList = []
+ SqlCommand = """select FullPath from File where Model = %s""" % str(FileModel)
+ RecordSet = Db.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ FileList.append(Record[0])
+
+ return FileList
+
+## Get the table list
+#
+# Search the File table and collect the names of all per-file tables
+#
+def GetTableList(FileModelList, Table, Db):
+ TableList = []
+ SqlCommand = """select ID from File where Model in %s""" % str(FileModelList)
+ RecordSet = Db.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ TableName = Table + str(Record[0])
+ TableList.append(TableName)
+
+ return TableList
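+# For example, with FileModelList = (MODEL_FILE_C, MODEL_FILE_H) and
+# Table = 'Identifier', this yields names such as 'Identifier1' and 'Identifier5'
+# (the numeric suffixes are file IDs and purely illustrative), matching the
+# per-file tables created by Database.InsertOneFile.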
+
+## ParseHeaderCommentSection
+#
+# Parse header comment section lines; extract the Abstract, Description,
+# Copyright, and License lines
+#
+# @param CommentList: List of (Comment, LineNumber)
+# @param FileName: FileName of the comment
+#
+def ParseHeaderCommentSection(CommentList, FileName = None):
+
+ Abstract = ''
+ Description = ''
+ Copyright = ''
+ License = ''
+ EndOfLine = "\n"
+ STR_HEADER_COMMENT_START = "@file"
+
+ #
+ # used to indicate the state of processing header comment section of dec,
+ # inf files
+ #
+ HEADER_COMMENT_NOT_STARTED = -1
+ HEADER_COMMENT_STARTED = 0
+ HEADER_COMMENT_FILE = 1
+ HEADER_COMMENT_ABSTRACT = 2
+ HEADER_COMMENT_DESCRIPTION = 3
+ HEADER_COMMENT_COPYRIGHT = 4
+ HEADER_COMMENT_LICENSE = 5
+ HEADER_COMMENT_END = 6
+ #
+ # first find the last copyright line
+ #
+ Last = 0
+ HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
+ for Index in range(len(CommentList) - 1, 0, -1):
+ Line = CommentList[Index][0]
+ if _IsCopyrightLine(Line):
+ Last = Index
+ break
+
+ for Item in CommentList:
+ Line = Item[0]
+ LineNo = Item[1]
+
+ if not Line.startswith('#') and Line:
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ Msg = 'Comment must start with #'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+ Comment = CleanString2(Line)[1]
+ Comment = Comment.strip()
+ #
+        # If there are blank lines within the License or Description, keep them, as
+        # they indicate separate blocks; a blank line in the position where the
+        # Abstract should be is also kept, as it indicates that there is no abstract
+ #
+ if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
+ HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
+ continue
+
+ if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
+ if Comment.startswith(STR_HEADER_COMMENT_START):
+ HeaderCommentStage = HEADER_COMMENT_ABSTRACT
+ else:
+ License += Comment + EndOfLine
+ else:
+ if HeaderCommentStage == HEADER_COMMENT_ABSTRACT:
+ #
+ # in case there is no abstract and description
+ #
+ if not Comment:
+ Abstract = ''
+ HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
+ elif _IsCopyrightLine(Comment):
+ Copyright += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
+ else:
+ Abstract += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
+ elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
+ #
+ # in case there is no description
+ #
+ if _IsCopyrightLine(Comment):
+ Copyright += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
+ else:
+ Description += Comment + EndOfLine
+ elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
+ if _IsCopyrightLine(Comment):
+ Copyright += Comment + EndOfLine
+ else:
+ #
+                    # Contents after the copyright lines are license text; non-copyright
+                    # lines in between copyright lines are discarded
+ #
+ if LineNo > Last:
+ if License:
+ License += EndOfLine
+ License += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_LICENSE
+ else:
+ if not Comment and not License:
+ continue
+ License += Comment + EndOfLine
+
+ if not Copyright.strip():
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ Msg = 'Header comment section must have copyright information'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+
+ if not License.strip():
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ Msg = 'Header comment section must have license information'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+
+ if not Abstract.strip() or Abstract.find('Component description file') > -1:
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ Msg = 'Header comment section must have Abstract information.'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+
+ return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
+
+## _IsCopyrightLine
+# Check whether the current line is a copyright line. The criterion is a
+# case-insensitive keyword "Copyright" followed by zero or more white space
+# characters and a "(" character
+#
+# @param LineContent: the line to be checked
+# @return: True if the current line is a copyright line, False otherwise
+#
+def _IsCopyrightLine (LineContent):
+ LineContent = LineContent.upper()
+ Result = False
+
+ #Support below Copyright format
+ # Copyright (C) 2020 Hewlett Packard Enterprise Development LP<BR>
+ # (C) Copyright 2020 Hewlett Packard Enterprise Development LP<BR>
+ ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
+ ReIsCopyrightTypeB = re.compile(r"""(^|\s)\(C\)\s*COPYRIGHT""", re.DOTALL)
+ if ReIsCopyrightRe.search(LineContent) or ReIsCopyrightTypeB.search(LineContent):
+ Result = True
+
+ return Result
+
+
+## CleanString2
+#
+# Split comments in a string
+# Remove spaces
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment character, used to ignore comment content;
+#                          default is '#'
+#
+def CleanString2(Line, CommentCharacter='#', AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip()
+ #
+ # Replace EDK1's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace('//', CommentCharacter)
+ #
+ # separate comments and statements
+ #
+ LineParts = Line.split(CommentCharacter, 1)
+ #
+ # remove whitespace again
+ #
+ Line = LineParts[0].strip()
+ if len(LineParts) > 1:
+ Comment = LineParts[1].strip()
+ #
+ # Remove prefixed and trailing comment characters
+ #
+ Start = 0
+ End = len(Comment)
+ while Start < End and Comment.startswith(CommentCharacter, Start, End):
+ Start += 1
+ while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
+ End -= 1
+ Comment = Comment[Start:End]
+ Comment = Comment.strip()
+ else:
+ Comment = ''
+
+ return Line, Comment
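+# A quick illustration of the behavior above, with a hypothetical input:
+#   CleanString2("gValue = 1 # comment text #") -> ("gValue = 1", "comment text")
+# The statement and comment are split on the first '#'; surrounding whitespace
+# and any leading or trailing comment characters are then stripped.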
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
new file mode 100755
index 00000000..09707817
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
@@ -0,0 +1,213 @@
+## @file
+# This file is used to create/update/query/erase table for files
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os
+
+import Common.EdkLogger as EdkLogger
+from CommonDataClass import DataClass
+from CommonDataClass.DataClass import FileClass
+
+## Convert to SQL required string format
+def ConvertToSqlString(StringList):
+ return map(lambda s: "'" + s.replace("'", "''") + "'", StringList)
+
+## Table
+#
+# This class defines a common table
+#
+# @param object: Inherited from object class
+#
+# @param Cursor: Cursor of the database
+# @param Name: Name of the table
+#
+class Table(object):
+ _COLUMN_ = ''
+ _ID_STEP_ = 1
+ _ID_MAX_ = 0x80000000
+ _DUMMY_ = 0
+
+ def __init__(self, Cursor, Name='', IdBase=0, Temporary=False):
+ self.Cur = Cursor
+ self.Table = Name
+ self.IdBase = int(IdBase)
+ self.ID = int(IdBase)
+ self.Temporary = Temporary
+
+ def __str__(self):
+ return self.Table
+
+ ## Create table
+ #
+ # Create a table
+ #
+ def Create(self, NewTable=True):
+ if NewTable:
+ self.Drop()
+
+ if self.Temporary:
+ SqlCommand = """create temp table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
+ else:
+ SqlCommand = """create table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
+ EdkLogger.debug(EdkLogger.DEBUG_8, SqlCommand)
+ self.Cur.execute(SqlCommand)
+ self.ID = self.GetId()
+
+ ## Insert table
+ #
+ # Insert a record into a table
+ #
+ def Insert(self, *Args):
+ self.ID = self.ID + self._ID_STEP_
+ if self.ID >= (self.IdBase + self._ID_MAX_):
+ self.ID = self.IdBase + self._ID_STEP_
+ Values = ", ".join(str(Arg) for Arg in Args)
+ SqlCommand = "insert into %s values(%s, %s)" % (self.Table, self.ID, Values)
+ EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
+ self.Cur.execute(SqlCommand)
+ return self.ID
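+    # Sketch: Insert does no quoting itself, so string values must be passed
+    # pre-quoted (see ConvertToSqlString above). For a hypothetical table 'Demo'
+    # with a fresh ID counter:
+    #   Tbl.Insert("'Name'", 2)  ->  "insert into Demo values(1, 'Name', 2)"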
+
+ ## Query table
+ #
+ # Query all records of the table
+ #
+ def Query(self):
+ SqlCommand = """select * from %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+ for Rs in self.Cur:
+ EdkLogger.verbose(str(Rs))
+ TotalCount = self.GetId()
+
+ ## Drop a table
+ #
+ # Drop the table
+ #
+ def Drop(self):
+ SqlCommand = """drop table IF EXISTS %s""" % self.Table
+ try:
+ self.Cur.execute(SqlCommand)
+ except Exception as e:
+ print("An error occurred when Drop a table:", e.args[0])
+
+ ## Get count
+ #
+ # Get a count of all records of the table
+ #
+ # @retval Count: Total count of all records
+ #
+ def GetCount(self):
+ SqlCommand = """select count(ID) from %s""" % self.Table
+ Record = self.Cur.execute(SqlCommand).fetchall()
+ return Record[0][0]
+
+ def GetId(self):
+ SqlCommand = """select max(ID) from %s""" % self.Table
+ Record = self.Cur.execute(SqlCommand).fetchall()
+ Id = Record[0][0]
+ if Id is None:
+ Id = self.IdBase
+ return Id
+
+ ## Init the ID of the table
+ #
+ # Init the ID of the table
+ #
+ def InitID(self):
+ self.ID = self.GetId()
+
+ ## Exec
+ #
+ # Exec Sql Command, return result
+ #
+ # @param SqlCommand: The SqlCommand to be executed
+ #
+ # @retval RecordSet: The result after executed
+ #
+ def Exec(self, SqlCommand):
+ EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
+ self.Cur.execute(SqlCommand)
+ RecordSet = self.Cur.fetchall()
+ return RecordSet
+
+ def SetEndFlag(self):
+ pass
+
+ def IsIntegral(self):
+ Result = self.Exec("select min(ID) from %s" % (self.Table))
+ if Result[0][0] != -1:
+ return False
+ return True
+
+ def GetAll(self):
+ return self.Exec("select * from %s where ID > 0 order by ID" % (self.Table))
+
+
+## TableDataModel
+#
+# This class defines a table used for the data model
+#
+# @param object: Inherited from object class
+#
+#
+class TableDataModel(Table):
+ _COLUMN_ = """
+ ID INTEGER PRIMARY KEY,
+ CrossIndex INTEGER NOT NULL,
+ Name VARCHAR NOT NULL,
+ Description VARCHAR
+ """
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor, 'DataModel')
+
+ ## Insert table
+ #
+ # Insert a record into table DataModel
+ #
+    # (the record ID is assigned automatically)
+ # @param CrossIndex: CrossIndex of a ModelType
+ # @param Name: Name of a ModelType
+ # @param Description: Description of a ModelType
+ #
+ def Insert(self, CrossIndex, Name, Description):
+ (Name, Description) = ConvertToSqlString((Name, Description))
+ return Table.Insert(self, CrossIndex, Name, Description)
+
+ ## Init table
+ #
+ # Create all default records of table DataModel
+ #
+ def InitTable(self):
+ EdkLogger.verbose("\nInitialize table DataModel started ...")
+ Count = self.GetCount()
+ if Count is not None and Count != 0:
+ return
+ for Item in DataClass.MODEL_LIST:
+ CrossIndex = Item[1]
+ Name = Item[0]
+ Description = Item[0]
+ self.Insert(CrossIndex, Name, Description)
+ EdkLogger.verbose("Initialize table DataModel ... DONE!")
+
+ ## Get CrossIndex
+ #
+ # Get a model's cross index from its name
+ #
+ # @param ModelName: Name of the model
+ # @retval CrossIndex: CrossIndex of the model
+ #
+ def GetCrossIndex(self, ModelName):
+ CrossIndex = -1
+ SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
+ self.Cur.execute(SqlCommand)
+ for Item in self.Cur:
+ CrossIndex = Item[0]
+
+ return CrossIndex
+
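+# Editor's sketch (illustrative only; it assumes the Create() method defined
+# earlier for this class and an in-memory SQLite database):
+#
+#   import sqlite3
+#   Conn = sqlite3.connect(':memory:')
+#   Tbl = TableDataModel(Conn.cursor())
+#   Tbl.Create()          # create the table from the _COLUMN_ definition
+#   Tbl.InitTable()       # insert the defaults from DataClass.MODEL_LIST
+#   print(Tbl.GetCount()) # number of default records inserted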
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
new file mode 100755
index 00000000..28d664d1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
@@ -0,0 +1,2089 @@
+## @file
+# This file is used to parse meta files
+#
+# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import re
+import time
+import copy
+from hashlib import md5
+import Common.EdkLogger as EdkLogger
+import Common.GlobalData as GlobalData
+import Ecc.EccGlobalData as EccGlobalData
+import Ecc.EccToolError as EccToolError
+
+from CommonDataClass.DataClass import *
+from Common.DataType import *
+from Common.StringUtils import *
+from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData
+from Common.Expression import *
+from CommonDataClass.Exceptions import *
+
+from Ecc.MetaFileWorkspace.MetaFileTable import MetaFileStorage
+from GenFds.FdfParser import FdfParser
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.LongFilePathSupport import CodecOpenLongFilePath
+
+## A decorator used to parse macro definition
+def ParseMacro(Parser):
+ def MacroParser(self):
+ Match = GlobalData.gMacroDefPattern.match(self._CurrentLine)
+ if not Match:
+ # Not 'DEFINE/EDK_GLOBAL' statement, call decorated method
+ Parser(self)
+ return
+
+ TokenList = GetSplitValueList(self._CurrentLine[Match.end(1):], TAB_EQUAL_SPLIT, 1)
+ # Syntax check
+ if not TokenList[0]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ if len(TokenList) < 2:
+ TokenList.append('')
+
+ Type = Match.group(1)
+ Name, Value = TokenList
+ # Global macros can only be defined via environment variables
+ if Name in GlobalData.gGlobalDefines:
+ EdkLogger.error('Parser', FORMAT_INVALID, "%s can only be defined via environment variable" % Name,
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ # Only upper-case letters, digits and '_' are allowed
+ if not GlobalData.gMacroNamePattern.match(Name):
+ EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+ Value = ReplaceMacro(Value, self._Macros)
+ self._ItemType = MODEL_META_DATA_DEFINE
+ # DEFINE defined macros
+ if Type == TAB_DSC_DEFINES_DEFINE:
+ if isinstance(self, DecParser):
+ if MODEL_META_DATA_HEADER in self._SectionType:
+ self._FileLocalMacros[Name] = Value
+ else:
+ for Scope in self._Scope:
+ self._SectionsMacroDict.setdefault((Scope[2], Scope[0], Scope[1]), {})[Name] = Value
+ elif self._SectionType == MODEL_META_DATA_HEADER:
+ self._FileLocalMacros[Name] = Value
+ else:
+ SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
+ if SectionDictKey not in self._SectionsMacroDict:
+ self._SectionsMacroDict[SectionDictKey] = {}
+ SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
+ SectionLocalMacros[Name] = Value
+ # EDK_GLOBAL defined macros
+ elif not isinstance(self, DscParser):
+ EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ elif self._SectionType != MODEL_META_DATA_HEADER:
+ EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used under [Defines] section",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
+ EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+ self._ValueList = [Type, Name, Value]
+
+ return MacroParser
+
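+# Editor's example (illustrative): with this decorator applied, a meta-file
+# line such as
+#
+#   DEFINE GCC_FLAGS = -DMDEPKG_NDEBUG
+#
+# is intercepted here and recorded as a macro definition, while any other
+# line falls through to the decorated section parser unchanged.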
+## Base class of parser
+#
+# This class is intended for derivation. The specific parser for each type of
+# file must derive from this class and implement its public interfaces.
+#
+# @param FilePath The path of platform description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+# @param Owner Owner ID (for sub-section parsing)
+# @param From ID from which the data comes (for !INCLUDE directive)
+#
+class MetaFileParser(object):
+ # data type (file content) for specific file type
+ DataType = {}
+
+ # Parser objects used to implement singleton
+ MetaFiles = {}
+
+ ## Factory method
+ #
+ # One file, one parser object. This factory method makes sure that there's
+ # only one object constructed for one meta file.
+ #
+ # @param Class class object of the concrete parser class
+ # (InfParser, DecParser or DscParser)
+ # @param FilePath The path of meta file
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+ def __new__(Class, FilePath, *args, **kwargs):
+ if FilePath in Class.MetaFiles:
+ return Class.MetaFiles[FilePath]
+ else:
+ ParserObject = super(MetaFileParser, Class).__new__(Class)
+ Class.MetaFiles[FilePath] = ParserObject
+ return ParserObject
+
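+ # Editor's note (illustrative): because of this factory, constructing a parser
+ # twice for the same path yields the same object, e.g.
+ #
+ #   P1 = DscParser(Path, MODEL_FILE_DSC, Table)
+ #   P2 = DscParser(Path, MODEL_FILE_DSC, Table)
+ #   assert P1 is P2   # one parser object per meta file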
+ ## Constructor of MetaFileParser
+ #
+ # Initialize object of MetaFileParser
+ #
+ # @param FilePath The path of platform description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ # @param Owner Owner ID (for sub-section parsing)
+ # @param From ID from which the data comes (for !INCLUDE directive)
+ #
+ def __init__(self, FilePath, FileType, Table, Owner=-1, From=-1):
+ self._Table = Table
+ self._RawTable = Table
+ self._FileType = FileType
+ self.MetaFile = FilePath
+ self._Defines = {}
+ self._FileLocalMacros = {}
+ self._SectionsMacroDict = {}
+
+ # for recursive parsing
+ self._Owner = [Owner]
+ self._From = From
+
+ # parser state used during parsing
+ self._ValueList = ['', '', '', '', '']
+ self._Scope = []
+ self._LineIndex = 0
+ self._CurrentLine = ''
+ self._SectionType = MODEL_UNKNOWN
+ self._SectionName = ''
+ self._InSubsection = False
+ self._SubsectionType = MODEL_UNKNOWN
+ self._SubsectionName = ''
+ self._ItemType = MODEL_UNKNOWN
+ self._LastItem = -1
+ self._Enabled = 0
+ self._Finished = False
+ self._PostProcessed = False
+ # Different versions of meta-files are parsed in different ways.
+ self._Version = 0
+ # UNI object and extra UNI object
+ self._UniObj = None
+ self._UniExtraObj = None
+
+ ## Store the parsed data in table
+ def _Store(self, *Args):
+ return self._Table.Insert(*Args)
+
+ ## Virtual method for starting parse
+ def Start(self):
+ raise NotImplementedError
+
+ ## Notify that a post-process is needed
+ def DoPostProcess(self):
+ self._PostProcessed = False
+
+ ## Set parsing complete flag in both class and table
+ def _Done(self):
+ self._Finished = True
+ ## Do not set end flag when processing included files
+ if self._From == -1:
+ self._Table.SetEndFlag()
+
+ def _PostProcess(self):
+ self._PostProcessed = True
+
+ ## Get the parse complete flag
+ def _GetFinished(self):
+ return self._Finished
+
+ ## Set the complete flag
+ def _SetFinished(self, Value):
+ self._Finished = Value
+
+ ## Use [] style to query data in table, just for readability
+ #
+ # DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)]
+ #
+ def __getitem__(self, DataInfo):
+ if not isinstance(DataInfo, tuple):
+ DataInfo = (DataInfo,)
+
+ # Parse the file first, if necessary
+ if not self._Finished:
+ if self._RawTable.IsIntegrity():
+ self._Finished = True
+ else:
+ self._Table = self._RawTable
+ self._PostProcessed = False
+ self.Start()
+
+ # No specific ARCH or Platform given, use raw data
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
+ return self._RawTable.Query(*DataInfo)
+
+ # Do post-process if necessary
+ if not self._PostProcessed:
+ self._PostProcess()
+
+ return self._Table.Query(*DataInfo)
+
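+ # Editor's illustration of the [] query style implemented above:
+ #
+ #   Records = Parser[MODEL_PCD_FIXED_AT_BUILD, 'IA32', 'COMMON']
+ #
+ # parses the file on first use, post-processes when a scope is given, and
+ # then queries the table for the requested data type and scope.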
+ ## Data parser for the common format shared by different types of files
+ #
+ # The common format in the meta file looks like:
+ #
+ # xxx1 | xxx2 | xxx3
+ #
+ @ParseMacro
+ def _CommonParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ ## Data parser for formats that contain a path
+ #
+ # Only paths may use macros, so we need to replace them before use.
+ #
+ @ParseMacro
+ def _PathParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ # Don't do macro replacement for dsc file at this point
+ if not isinstance(self, DscParser):
+ Macros = self._Macros
+ self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
+
+ ## Skip unsupported data
+ def _Skip(self):
+ if self._SectionName == TAB_USER_EXTENSIONS.upper() and self._CurrentLine.upper().endswith('.UNI'):
+ if EccGlobalData.gConfig.UniCheckHelpInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ ExtraUni = self._CurrentLine.strip()
+ ExtraUniFile = os.path.join(os.path.dirname(self.MetaFile), ExtraUni)
+ IsModuleUni = self.MetaFile.upper().endswith('.INF')
+ self._UniExtraObj = UniParser(ExtraUniFile, IsExtraUni=True, IsModuleUni=IsModuleUni)
+ self._UniExtraObj.Start()
+ else:
+ EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
+ Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ ## Section header parser
+ #
+ # The section header is always in following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
+ def _SectionHeaderParser(self):
+ self._Scope = []
+ self._SectionName = ''
+ ArchList = set()
+ for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+ if Item == '':
+ continue
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ # different section names must not be mixed in one section header
+ if self._SectionName != '' and self._SectionName != ItemList[0].upper():
+ EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ self._SectionName = ItemList[0].upper()
+ if self._SectionName in self.DataType:
+ self._SectionType = self.DataType[self._SectionName]
+ else:
+ self._SectionType = MODEL_UNKNOWN
+ EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ # S1 is always Arch
+ if len(ItemList) > 1:
+ S1 = ItemList[1].upper()
+ else:
+ S1 = 'COMMON'
+ ArchList.add(S1)
+ # S2 may be Platform or ModuleType
+ if len(ItemList) > 2:
+ S2 = ItemList[2].upper()
+ else:
+ S2 = 'COMMON'
+ self._Scope.append([S1, S2])
+
+ # 'COMMON' must not be used with specific ARCHs in the same section
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ # If the section information is needed later, it should be stored in database
+ self._ValueList[0] = self._SectionName
+
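+ # Editor's example: a header such as [Sources.IA32, Sources.X64] yields
+ # _SectionName == 'SOURCES' and _Scope == [['IA32', 'COMMON'], ['X64', 'COMMON']],
+ # while mixing [Sources.common, Sources.IA32] triggers the 'common' ARCH error above.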
+ ## [defines] section parser
+ @ParseMacro
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[1:len(TokenList)] = TokenList
+ if not self._ValueList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ if not self._ValueList[2]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+ self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
+ Name, Value = self._ValueList[1], self._ValueList[2]
+ # Sometimes we need to distinguish between EDK and EDK2 modules
+ if Name == 'INF_VERSION':
+ try:
+ self._Version = int(Value, 0)
+ except:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ elif Name == 'MODULE_UNI_FILE':
+ UniFile = os.path.join(os.path.dirname(self.MetaFile), Value)
+ if os.path.exists(UniFile):
+ self._UniObj = UniParser(UniFile, IsExtraUni=False, IsModuleUni=True)
+ self._UniObj.Start()
+ else:
+ EdkLogger.error('Parser', FILE_NOT_FOUND, "Module UNI file %s is missing." % Value,
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1,
+ RaiseError=False)
+ elif Name == 'PACKAGE_UNI_FILE':
+ UniFile = os.path.join(os.path.dirname(self.MetaFile), Value)
+ if os.path.exists(UniFile):
+ self._UniObj = UniParser(UniFile, IsExtraUni=False, IsModuleUni=False)
+
+ if isinstance(self, InfParser) and self._Version < 0x00010005:
+ # EDK module allows using defines as macros
+ self._FileLocalMacros[Name] = Value
+ self._Defines[Name] = Value
+
+ ## [BuildOptions] section parser
+ @ParseMacro
+ def _BuildOptionParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+ if len(TokenList2) == 2:
+ self._ValueList[0] = TokenList2[0] # toolchain family
+ self._ValueList[1] = TokenList2[1] # keys
+ else:
+ self._ValueList[1] = TokenList[0]
+ if len(TokenList) == 2 and not isinstance(self, DscParser): # value
+ self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)
+
+ if self._ValueList[1].count('_') != 4:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+ ExtraData=self._CurrentLine,
+ File=self.MetaFile,
+ Line=self._LineIndex+1
+ )
+
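+ # Editor's example: a build-option line such as
+ #
+ #   GCC:DEBUG_GCC5_IA32_CC_FLAGS = -O0
+ #
+ # is parsed into family 'GCC', key 'DEBUG_GCC5_IA32_CC_FLAGS' and value '-O0';
+ # the key must contain exactly four '_' separators, as checked above.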
+ def _GetMacros(self):
+ Macros = {}
+ Macros.update(self._FileLocalMacros)
+ Macros.update(self._GetApplicableSectionMacro())
+ return Macros
+
+
+ ## Get section macros applicable to the current line; they may come from other
+ ## sections that share the same name but have a wider scope
+ def _GetApplicableSectionMacro(self):
+ Macros = {}
+ for Scope1, Scope2 in [("COMMON", "COMMON"), ("COMMON", self._Scope[0][1]),
+ (self._Scope[0][0], "COMMON"), (self._Scope[0][0], self._Scope[0][1])]:
+ if (self._SectionType, Scope1, Scope2) in self._SectionsMacroDict:
+ Macros.update(self._SectionsMacroDict[(self._SectionType, Scope1, Scope2)])
+ return Macros
+
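+ # Editor's note: for a line in, say, [BuildOptions.IA32.DXE_DRIVER], macros are
+ # collected from the (COMMON, COMMON), (COMMON, DXE_DRIVER), (IA32, COMMON) and
+ # (IA32, DXE_DRIVER) scopes of the same section type, in that order, so the
+ # most specific scope wins on name clashes.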
+ _SectionParser = {}
+ Finished = property(_GetFinished, _SetFinished)
+ _Macros = property(_GetMacros)
+
+
+## INF file parser class
+#
+# @param FilePath The path of module description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+#
+class InfParser(MetaFileParser):
+ # INF file supported data types (one type per section)
+ DataType = {
+ TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+ TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+ TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+ TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+ TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
+ ## Constructor of InfParser
+ #
+ # Initialize object of InfParser
+ #
+ # @param FilePath The path of module description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ #
+ def __init__(self, FilePath, FileType, Table):
+ # prevent re-initialization
+ if hasattr(self, "_Table"):
+ return
+ MetaFileParser.__init__(self, FilePath, FileType, Table)
+ self.TblFile = EccGlobalData.gDb.TblFile
+ self.FileID = -1
+
+ ## Parser starter
+ def Start(self):
+ NmakeLine = ''
+ Content = ''
+ Usage = ''
+ try:
+ Content = open(str(self.MetaFile), 'r').readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(self.MetaFile)
+ FileID = self.TblFile.GetFileId(Filename)
+ if FileID:
+ self.FileID = FileID
+ else:
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
+
+ # parse the file line by line
+ IsFindBlockComment = False
+
+ for Index in range(0, len(Content)):
+ if self._SectionType in [MODEL_EFI_GUID,
+ MODEL_EFI_PROTOCOL,
+ MODEL_EFI_PPI,
+ MODEL_PCD_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC]:
+ Line = Content[Index].strip()
+ if Line.startswith(TAB_SPECIAL_COMMENT):
+ Usage += ' ' + Line[Line.find(TAB_SPECIAL_COMMENT):]
+ continue
+ elif Line.startswith(TAB_COMMENT_SPLIT):
+ continue
+ elif Line.find(TAB_COMMENT_SPLIT) > 0:
+ Usage += ' ' + Line[Line.find(TAB_COMMENT_SPLIT):]
+ Line = Line[:Line.find(TAB_COMMENT_SPLIT)]
+ else:
+ # skip empty, commented, block commented lines
+ Line = CleanString(Content[Index], AllowCppStyleComment=True)
+ Usage = ''
+ NextLine = ''
+ if Index + 1 < len(Content):
+ NextLine = CleanString(Content[Index + 1])
+ if Line == '':
+ continue
+ if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
+ IsFindBlockComment = True
+ continue
+ if Line.find(DataType.TAB_COMMENT_EDK_END) > -1:
+ IsFindBlockComment = False
+ continue
+ if IsFindBlockComment:
+ continue
+
+ self._LineIndex = Index
+ self._CurrentLine = Line
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionHeaderParser()
+ # Check invalid sections
+ if self._Version < 0x00010005:
+ if self._SectionType in [MODEL_META_DATA_BUILD_OPTION,
+ MODEL_EFI_LIBRARY_CLASS,
+ MODEL_META_DATA_PACKAGE,
+ MODEL_PCD_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC,
+ MODEL_EFI_GUID,
+ MODEL_EFI_PROTOCOL,
+ MODEL_EFI_PPI,
+ MODEL_META_DATA_USER_EXTENSION]:
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "Section [%s] is not allowed in inf file without version" % (self._SectionName),
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ elif self._SectionType in [MODEL_EFI_INCLUDE,
+ MODEL_EFI_LIBRARY_INSTANCE,
+ MODEL_META_DATA_NMAKE]:
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "Section [%s] is not allowed in inf file with version 0x%08x" % (self._SectionName, self._Version),
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ continue
+ # merge lines continued with '\' in the NMAKE section
+ elif self._SectionType == MODEL_META_DATA_NMAKE:
+ if Line[-1] == '\\':
+ if NextLine == '':
+ self._CurrentLine = NmakeLine + Line[0:-1]
+ NmakeLine = ''
+ else:
+ if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
+ self._CurrentLine = NmakeLine + Line[0:-1]
+ NmakeLine = ''
+ else:
+ NmakeLine = NmakeLine + ' ' + Line[0:-1]
+ continue
+ else:
+ self._CurrentLine = NmakeLine + Line
+ NmakeLine = ''
+
+ # section content
+ self._ValueList = ['', '', '']
+ # parse current line, result will be put in self._ValueList
+ self._SectionParser[self._SectionType](self)
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
+ self._ItemType = -1
+ continue
+ #
+ # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ self._ValueList[0] = self._ValueList[0].replace('/', '\\')
+ Usage = Usage.strip()
+ for Arch, Platform in self._Scope:
+ self._Store(self._SectionType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ Platform,
+ self._Owner[-1],
+ self.FileID,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ 0,
+ Usage
+ )
+ Usage = ''
+ if IsFindBlockComment:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
+ File=self.MetaFile)
+ self._Done()
+
+ ## Data parser for formats that contain a path
+ #
+ # Only paths may use macros, so we need to replace them before use.
+ #
+ def _IncludeParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ Macros = self._Macros
+ if Macros:
+ for Index in range(0, len(self._ValueList)):
+ Value = self._ValueList[Index]
+ if not Value:
+ continue
+
+ self._ValueList[Index] = ReplaceMacro(Value, Macros)
+
+ ## Parse [Sources] section
+ #
+ # Only paths may use macros, so we need to replace them before use.
+ #
+ @ParseMacro
+ def _SourceFileParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ Macros = self._Macros
+ # For ACPI tables, remove a macro suffix like ' TABLE_NAME=Sata1'
+ if 'COMPONENT_TYPE' in Macros:
+ if self._Defines['COMPONENT_TYPE'].upper() == 'ACPITABLE':
+ self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
+ if self._Defines['BASE_NAME'] == 'Microcode':
+ pass
+ self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
+
+ ## Parse [Binaries] section
+ #
+ # Only paths may use macros, so we need to replace them before use.
+ #
+ @ParseMacro
+ def _BinaryFileParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if not TokenList[0]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if not TokenList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
+
+ ## [nmake] section parser (Edk.x style only)
+ def _NmakeParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ # remove macros
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
+ # remove self-reference in macro setting
+ #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
+
+ ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
+ @ParseMacro
+ def _PcdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(ValueList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:1] = ValueList
+ if len(TokenList) > 1:
+ self._ValueList[2] = TokenList[1]
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+
+ # If the value is 'True'/'true'/'TRUE' or 'False'/'false'/'FALSE', replace it with integer 1 or 0.
+ if self._ValueList[2] != '':
+ InfPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+ if InfPcdValueList[0] in ['True', 'true', 'TRUE']:
+ self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '1', 1)
+ elif InfPcdValueList[0] in ['False', 'false', 'FALSE']:
+ self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '0', 1)
+
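+ # Editor's example: an INF line such as
+ #
+ #   gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x0f
+ #
+ # is split into token space GUID, PCD name and value
+ # ('gEfiMdePkgTokenSpaceGuid', 'PcdDebugPropertyMask', '0x0f'); boolean
+ # literals in the value are normalized to '1'/'0' as above.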
+ ## [depex] section parser
+ @ParseMacro
+ def _DepexParser(self):
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ _SectionParser = {
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
+ MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
+ MODEL_EFI_INCLUDE : _IncludeParser, # for Edk.x modules
+ MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for Edk.x modules
+ MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
+ MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser,
+ MODEL_META_DATA_NMAKE : _NmakeParser, # for Edk.x modules
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX : _PcdParser,
+ MODEL_PCD_DYNAMIC : _PcdParser,
+ MODEL_EFI_SOURCE_FILE : _SourceFileParser,
+ MODEL_EFI_GUID : MetaFileParser._CommonParser,
+ MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser,
+ MODEL_EFI_PPI : MetaFileParser._CommonParser,
+ MODEL_EFI_DEPEX : _DepexParser,
+ MODEL_EFI_BINARY_FILE : _BinaryFileParser,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
+ }
+
+## DSC file parser class
+#
+# @param FilePath The path of platform description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+# @param Owner Owner ID (for sub-section parsing)
+# @param From ID from which the data comes (for !INCLUDE directive)
+#
+class DscParser(MetaFileParser):
+ # DSC file supported data types (one type per section)
+ DataType = {
+ TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
+ TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
+ TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
+ TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
+ TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
+ TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
+ TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+ TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+ TAB_DSC_DEFINES_EDKGLOBAL : MODEL_META_DATA_GLOBAL_DEFINE,
+ TAB_INCLUDE.upper() : MODEL_META_DATA_INCLUDE,
+ TAB_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ TAB_IF_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ TAB_IF_N_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
+ TAB_ELSE_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
+ TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
+ TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
+ TAB_ERROR.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR,
+ }
+
+ # Valid names in define section
+ DefineKeywords = [
+ "DSC_SPECIFICATION",
+ "PLATFORM_NAME",
+ "PLATFORM_GUID",
+ "PLATFORM_VERSION",
+ "SKUID_IDENTIFIER",
+ "PCD_INFO_GENERATION",
+ "SUPPORTED_ARCHITECTURES",
+ "BUILD_TARGETS",
+ "OUTPUT_DIRECTORY",
+ "FLASH_DEFINITION",
+ "BUILD_NUMBER",
+ "RFC_LANGUAGES",
+ "ISO_LANGUAGES",
+ "TIME_STAMP_FILE",
+ "VPD_TOOL_GUID",
+ "FIX_LOAD_TOP_MEMORY_ADDRESS"
+ ]
+
+ SubSectionDefineKeywords = [
+ "FILE_GUID"
+ ]
+
+ SymbolPattern = ValueExpression.SymbolPattern
+
+ ## Constructor of DscParser
+ #
+ # Initialize object of DscParser
+ #
+ # @param FilePath The path of platform description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ # @param Owner Owner ID (for sub-section parsing)
+ # @param From ID from which the data comes (for !INCLUDE directive)
+ #
+ def __init__(self, FilePath, FileType, Table, Owner=-1, From=-1):
+ # prevent re-initialization
+ if hasattr(self, "_Table"):
+ return
+ MetaFileParser.__init__(self, FilePath, FileType, Table, Owner, From)
+ self._Version = 0x00010005 # Only EDK2 dsc file is supported
+ # to store conditional directive evaluation result
+ self._DirectiveStack = []
+ self._DirectiveEvalStack = []
+ self._Enabled = 1
+
+ # Final valid replaceable symbols
+ self._Symbols = {}
+ #
+ # Map the ID between the original table and new table to track
+ # the owner item
+ #
+ self._IdMapping = {-1:-1}
+
+ self.TblFile = EccGlobalData.gDb.TblFile
+ self.FileID = -1
+
+ ## Parser starter
+ def Start(self):
+ Content = ''
+ try:
+ Content = open(str(self.MetaFile.Path), 'r').readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(self.MetaFile.Path)
+ FileID = self.TblFile.GetFileId(Filename)
+ if FileID:
+ self.FileID = FileID
+ else:
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)
+
+
+ for Index in range(0, len(Content)):
+ Line = CleanString(Content[Index])
+ # skip empty line
+ if Line == '':
+ continue
+
+ self._CurrentLine = Line
+ self._LineIndex = Index
+ if self._InSubsection and self._Owner[-1] == -1:
+ self._Owner.append(self._LastItem)
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionType = MODEL_META_DATA_SECTION_HEADER
+ # subsection ending
+ elif Line[0] == '}' and self._InSubsection:
+ self._InSubsection = False
+ self._SubsectionType = MODEL_UNKNOWN
+ self._SubsectionName = ''
+ self._Owner[-1] = -1
+ continue
+ # subsection header
+ elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
+ self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER
+ # directive line
+ elif Line[0] == '!':
+ self._DirectiveParser()
+ continue
+
+ if self._InSubsection:
+ SectionType = self._SubsectionType
+ else:
+ SectionType = self._SectionType
+ self._ItemType = SectionType
+
+ self._ValueList = ['', '', '']
+ self._SectionParser[SectionType](self)
+ if self._ValueList is None:
+ continue
+ #
+ # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ for Arch, ModuleType in self._Scope:
+ self._LastItem = self._Store(
+ self._ItemType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ self._Owner[-1],
+ self.FileID,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
+
+ if self._DirectiveStack:
+ Type, Line, Text = self._DirectiveStack[-1]
+ EdkLogger.error('Parser', FORMAT_INVALID, "No matching '!endif' found",
+ ExtraData=Text, File=self.MetaFile, Line=Line)
+ self._Done()
+
+ ## <subsection_header> parser
+ def _SubsectionHeaderParser(self):
+ self._SubsectionName = self._CurrentLine[1:-1].upper()
+ if self._SubsectionName in self.DataType:
+ self._SubsectionType = self.DataType[self._SubsectionName]
+ else:
+ self._SubsectionType = MODEL_UNKNOWN
+ EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ self._ValueList[0] = self._SubsectionName
+
+ ## Directive statement parser
+ def _DirectiveParser(self):
+ self._ValueList = ['', '', '']
+ TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ # Syntax check
+ DirectiveName = self._ValueList[0].upper()
+ if DirectiveName not in self.DataType:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
+ EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
+ File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData=self._CurrentLine)
+
+ ItemType = self.DataType[DirectiveName]
+ if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
+ # Remove all directives between !if and !endif, including themselves
+ while self._DirectiveStack:
+ # Remove any !else or !elseif
+ DirectiveInfo = self._DirectiveStack.pop()
+ if DirectiveInfo[0] in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+ break
+ else:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'",
+ File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData=self._CurrentLine)
+ elif ItemType != MODEL_META_DATA_INCLUDE:
+ # Report an error if a '!elseif' follows a '!else'
+ if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \
+ self._DirectiveStack and \
+ self._DirectiveStack[-1][0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
+ EdkLogger.error("Parser", FORMAT_INVALID, "'!elseif' after '!else'",
+ File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData=self._CurrentLine)
+ self._DirectiveStack.append((ItemType, self._LineIndex+1, self._CurrentLine))
+ elif self._From > 0:
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "No '!include' allowed in included file",
+ ExtraData=self._CurrentLine, File=self.MetaFile,
+ Line=self._LineIndex+1)
+
+ #
+ # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ self._LastItem = self._Store(
+ ItemType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ 'COMMON',
+ 'COMMON',
+ self._Owner[-1],
+ self.FileID,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ 0
+ )
+
+ ## [defines] section parser
+ @ParseMacro
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[1:len(TokenList)] = TokenList
+
+ # Syntax check
+ if not self._ValueList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ if not self._ValueList[2]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ if (not self._ValueList[1] in self.DefineKeywords and
+ (self._InSubsection and self._ValueList[1] not in self.SubSectionDefineKeywords)):
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "Unknown keyword found: %s. "
+ "If this is a macro you must "
+ "add it as a DEFINE in the DSC" % self._ValueList[1],
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ self._Defines[self._ValueList[1]] = self._ValueList[2]
+ self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
+
+ @ParseMacro
+ def _SkuIdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ if len(TokenList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Integer>|<UiName>'",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ ## Parse EDK-style library modules
+ def _LibraryInstanceParser(self):
+ self._ValueList[0] = self._CurrentLine
+
+ ## PCD sections parser
+ #
+ # [PcdsFixedAtBuild]
+ # [PcdsPatchableInModule]
+ # [PcdsFeatureFlag]
+ # [PcdsDynamicEx]
+ # [PcdsDynamicExDefault]
+ # [PcdsDynamicExVpd]
+ # [PcdsDynamicExHii]
+ # [PcdsDynamic]
+ # [PcdsDynamicDefault]
+ # [PcdsDynamicVpd]
+ # [PcdsDynamicHii]
+ #
+ @ParseMacro
+ def _PcdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(TokenList) == 2:
+ self._ValueList[2] = TokenList[1]
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if self._ValueList[2] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # If the value is 'True'/'true'/'TRUE' or 'False'/'false'/'FALSE', replace it with integer 1 or 0.
+ DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+ if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
+ self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '1', 1)
+ elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
+ self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1)
+
+ ## [components] section parser
+ @ParseMacro
+ def _ComponentParser(self):
+ if self._CurrentLine[-1] == '{':
+ self._ValueList[0] = self._CurrentLine[0:-1].strip()
+ self._InSubsection = True
+ else:
+ self._ValueList[0] = self._CurrentLine
+
+ ## [LibraryClasses] section
+ @ParseMacro
+ def _LibraryClassParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+
+ self._ValueList[0:len(TokenList)] = TokenList
+
+
+ ## [BuildOptions] section parser
+ @ParseMacro
+ def _BuildOptionParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+ if len(TokenList2) == 2:
+ self._ValueList[0] = TokenList2[0] # toolchain family
+ self._ValueList[1] = TokenList2[1] # keys
+ else:
+ self._ValueList[1] = TokenList[0]
+ if len(TokenList) == 2: # value
+ self._ValueList[2] = TokenList[1]
+
+ if self._ValueList[1].count('_') != 4:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+ ExtraData=self._CurrentLine,
+ File=self.MetaFile,
+ Line=self._LineIndex+1
+ )
+
+ ## Override parent's method since we'll do all macro replacements in parser
+ def _GetMacros(self):
+ Macros = dict( [('ARCH', 'IA32'), ('FAMILY', TAB_COMPILER_MSFT), ('TOOL_CHAIN_TAG', 'VS2008x86'), ('TARGET', 'DEBUG')])
+ Macros.update(self._FileLocalMacros)
+ Macros.update(self._GetApplicableSectionMacro())
+ Macros.update(GlobalData.gEdkGlobal)
+ Macros.update(GlobalData.gPlatformDefines)
+ Macros.update(GlobalData.gCommandLineDefines)
+ # PCD cannot be referenced in macro definition
+ if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]:
+ Macros.update(self._Symbols)
+ return Macros
+
+ def _PostProcess(self):
+ Processer = {
+ MODEL_META_DATA_SECTION_HEADER : self.__ProcessSectionHeader,
+ MODEL_META_DATA_SUBSECTION_HEADER : self.__ProcessSubsectionHeader,
+ MODEL_META_DATA_HEADER : self.__ProcessDefine,
+ MODEL_META_DATA_DEFINE : self.__ProcessDefine,
+ MODEL_META_DATA_GLOBAL_DEFINE : self.__ProcessDefine,
+ MODEL_META_DATA_INCLUDE : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF : self.__ProcessDirective,
+ MODEL_EFI_SKU_ID : self.__ProcessSkuId,
+ MODEL_EFI_LIBRARY_INSTANCE : self.__ProcessLibraryInstance,
+ MODEL_EFI_LIBRARY_CLASS : self.__ProcessLibraryClass,
+ MODEL_PCD_FIXED_AT_BUILD : self.__ProcessPcd,
+ MODEL_PCD_PATCHABLE_IN_MODULE : self.__ProcessPcd,
+ MODEL_PCD_FEATURE_FLAG : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_DEFAULT : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_HII : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_VPD : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_EX_HII : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_EX_VPD : self.__ProcessPcd,
+ MODEL_META_DATA_COMPONENT : self.__ProcessComponent,
+ MODEL_META_DATA_BUILD_OPTION : self.__ProcessBuildOption,
+ MODEL_UNKNOWN : self._Skip,
+ MODEL_META_DATA_USER_EXTENSION : self._Skip,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._Skip,
+ }
+
+ self._RawTable = self._Table
+ self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True)
+ self._DirectiveStack = []
+ self._DirectiveEvalStack = []
+ self._FileWithError = self.MetaFile
+ self._FileLocalMacros = {}
+ self._SectionsMacroDict = {}
+ GlobalData.gPlatformDefines = {}
+
+ # Get all macros and PCDs that have straightforward values
+ self.__RetrievePcdValue()
+ self._Content = self._RawTable.GetAll()
+ self._ContentIndex = 0
+ while self._ContentIndex < len(self._Content) :
+ Id, self._ItemType, V1, V2, V3, S1, S2, Owner, BelongsToFile, self._From, \
+ LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
+
+ if self._From < 0:
+ self._FileWithError = self.MetaFile
+
+ self._ContentIndex += 1
+
+ self._Scope = [[S1, S2]]
+ self._LineIndex = LineStart - 1
+ self._ValueList = [V1, V2, V3]
+
+ try:
+ Processer[self._ItemType]()
+ except EvaluationException as Excpt:
+ #
+ # Only catch expression evaluation error here. We need to report
+ # the precise line number on which the error occurred
+ #
+ pass
+# EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
+# File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+# Line=self._LineIndex+1)
+ except MacroException as Excpt:
+ EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex+1)
+
+ if self._ValueList is None:
+ continue
+
+ NewOwner = self._IdMapping.get(Owner, -1)
+ self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
+ self._LastItem = self._Store(
+ self._ItemType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ S1,
+ S2,
+ NewOwner,
+ BelongsToFile,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
+ self._IdMapping[Id] = self._LastItem
+
+ RecordList = self._Table.GetAll()
+
+ self._RawTable.Drop()
+ self._Table.Drop()
+ for Record in RecordList:
+ EccGlobalData.gDb.TblDsc.Insert(Record[1], Record[2], Record[3], Record[4], Record[5], Record[6], Record[7], Record[8], Record[9], Record[10], Record[11], Record[12], Record[13], Record[14])
+ GlobalData.gPlatformDefines.update(self._FileLocalMacros)
+ self._PostProcessed = True
+ self._Content = None
+
+ def __ProcessSectionHeader(self):
+ self._SectionName = self._ValueList[0]
+ if self._SectionName in self.DataType:
+ self._SectionType = self.DataType[self._SectionName]
+ else:
+ self._SectionType = MODEL_UNKNOWN
+
+ def __ProcessSubsectionHeader(self):
+ self._SubsectionName = self._ValueList[0]
+ if self._SubsectionName in self.DataType:
+ self._SubsectionType = self.DataType[self._SubsectionName]
+ else:
+ self._SubsectionType = MODEL_UNKNOWN
+
+ def __RetrievePcdValue(self):
+ Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem=-1.0)
+ for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
+ Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
+ # Only use PCDs whose value is straightforward (no macros or PCD references)
+ if self.SymbolPattern.findall(Value):
+ continue
+ Name = TokenSpaceGuid + '.' + PcdName
+ # Don't use PCDs that have conflicting values.
+ if Name in self._Symbols and self._Symbols[Name] != Value:
+ self._Symbols.pop(Name)
+ continue
+ self._Symbols[Name] = Value
+
+ Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
+ for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
+ Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
+ # Only use PCDs whose value is straightforward (no macros or PCD references)
+ if self.SymbolPattern.findall(Value):
+ continue
+ Name = TokenSpaceGuid + '.' + PcdName
+ # Don't use PCDs that have conflicting values.
+ if Name in self._Symbols and self._Symbols[Name] != Value:
+ self._Symbols.pop(Name)
+ continue
+ self._Symbols[Name] = Value
+
+ def __ProcessDefine(self):
+ if not self._Enabled:
+ return
+
+ Type, Name, Value = self._ValueList
+ Value = ReplaceMacro(Value, self._Macros, False)
+ if self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._SectionType == MODEL_META_DATA_HEADER:
+ self._FileLocalMacros[Name] = Value
+ else:
+ SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
+ if SectionDictKey not in self._SectionsMacroDict:
+ self._SectionsMacroDict[SectionDictKey] = {}
+ SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
+ SectionLocalMacros[Name] = Value
+ elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
+ GlobalData.gEdkGlobal[Name] = Value
+
+ #
+ # Keywords in the [Defines] section can be used as macros
+ #
+ if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
+ self._FileLocalMacros[Name] = Value
+
+ self._ValueList = [Type, Name, Value]
+
+ def __ProcessDirective(self):
+ Result = None
+ if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF]:
+ Macros = self._Macros
+ Macros.update(GlobalData.gGlobalDefines)
+ try:
+ Result = ValueExpression(self._ValueList[1], Macros)()
+ except SymbolNotFound as Exc:
+ EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
+ Result = False
+ except WrnExpression as Excpt:
+ #
+ # Catch expression evaluation warning here. We need to report
+ # the precise line number and return the evaluation result
+ #
+ EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex+1)
+ Result = Excpt.result
+ except BadExpression as Exc:
+ EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
+ Result = False
+
+ if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+ self._DirectiveStack.append(self._ItemType)
+ if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
+ Result = bool(Result)
+ else:
+ Macro = self._ValueList[1]
+ Macro = Macro[2:-1] if (Macro.startswith("$(") and Macro.endswith(")")) else Macro
+ Result = Macro in self._Macros
+ if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF:
+ Result = not Result
+ self._DirectiveEvalStack.append(Result)
+ elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF:
+ self._DirectiveStack.append(self._ItemType)
+ self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+ self._DirectiveEvalStack.append(bool(Result))
+ elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
+ self._DirectiveStack[-1] = self._ItemType
+ self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+ elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
+ # Back to the nearest !if/!ifdef/!ifndef
+ while self._DirectiveStack:
+ self._DirectiveEvalStack.pop()
+ Directive = self._DirectiveStack.pop()
+ if Directive in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+ break
+ elif self._ItemType == MODEL_META_DATA_INCLUDE:
+ # The included file must be relative to the workspace or to the same directory as the DSC file
+ __IncludeMacros = {}
+ #
+ # Allow using system environment variables in path after !include
+ #
+ __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE']
+
+ #
+ # Allow using macros that come from the [Defines] section, for compatibility.
+ #
+ __IncludeMacros.update(self._Macros)
+
+ IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
+ #
+ # First search the include file under the same directory as DSC file
+ #
+ IncludedFile1 = PathClass(IncludedFile, self.MetaFile.Dir)
+ ErrorCode, ErrorInfo1 = IncludedFile1.Validate()
+ if ErrorCode != 0:
+ #
+ # Also search file under the WORKSPACE directory
+ #
+ IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
+ ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
+ Line=self._LineIndex+1, ExtraData=ErrorInfo1 + "\n"+ ErrorInfo2)
+
+ self._FileWithError = IncludedFile1
+
+ IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, True)
+ Owner = self._Content[self._ContentIndex-1][0]
+ Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable,
+ Owner=Owner, From=Owner)
+
+ # seed the sub-parser status with the current status
+ Parser._SectionName = self._SectionName
+ Parser._SectionType = self._SectionType
+ Parser._Scope = self._Scope
+ Parser._Enabled = self._Enabled
+ # Parse the included file
+ Parser.Start()
+
+ # update current status with sub-parser's status
+ self._SectionName = Parser._SectionName
+ self._SectionType = Parser._SectionType
+ self._Scope = Parser._Scope
+ self._Enabled = Parser._Enabled
+
+ # Insert all records in the table for the included file into dsc file table
+ Records = IncludedFileTable.GetAll()
+ if Records:
+ self._Content[self._ContentIndex:self._ContentIndex] = Records
+ self._Content.pop(self._ContentIndex-1)
+ self._ValueList = None
+ self._ContentIndex -= 1
+
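+ # Editor's sketch of the two stacks used above: for
+ #
+ #   !if $(FOO) == 1
+ #     ...
+ #   !else
+ #     ...
+ #   !endif
+ #
+ # the !if pushes its evaluation result on _DirectiveEvalStack, !else negates
+ # the top entry, and !endif pops entries back to the matching
+ # !if/!ifdef/!ifndef.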
+ def __ProcessSkuId(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
+ for Value in self._ValueList]
+
+ def __ProcessLibraryInstance(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
+
+ def __ProcessLibraryClass(self):
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
+
+ def __ProcessPcd(self):
+ ValueList = GetSplitValueList(self._ValueList[2])
+ #
+ # PCD value can be an expression
+ #
+ if len(ValueList) > 1 and ValueList[1] == TAB_VOID:
+ PcdValue = ValueList[0]
+ try:
+ ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
+ except WrnExpression as Value:
+ ValueList[0] = Value.result
+ else:
+ PcdValue = ValueList[-1]
+ try:
+ ValueList[-1] = ValueExpression(PcdValue, self._Macros)(True)
+ except WrnExpression as Value:
+ ValueList[-1] = Value.result
+
+ if ValueList[-1] == 'True':
+ ValueList[-1] = '1'
+ if ValueList[-1] == 'False':
+ ValueList[-1] = '0'
+
+ self._ValueList[2] = '|'.join(ValueList)
+
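+ # Editor's example (hypothetical PCD name): for a value field that is an
+ # expression, such as
+ #
+ #   gTokenSpaceGuid.PcdFoo|0x100 + 0x20
+ #
+ # ValueExpression evaluates the last segment, and a boolean result
+ # ('True'/'False') is rewritten to '1'/'0' by the lines above.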
+ def __ProcessComponent(self):
+ self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
+
+ def __ProcessBuildOption(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
+ for Value in self._ValueList]
+
+ _SectionParser = {
+ MODEL_META_DATA_HEADER : _DefineParser,
+ MODEL_EFI_SKU_ID : _SkuIdParser,
+ MODEL_EFI_LIBRARY_INSTANCE : _LibraryInstanceParser,
+ MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser,
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser,
+ MODEL_PCD_DYNAMIC_HII : _PcdParser,
+ MODEL_PCD_DYNAMIC_VPD : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_HII : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser,
+ MODEL_META_DATA_COMPONENT : _ComponentParser,
+ MODEL_META_DATA_BUILD_OPTION : _BuildOptionParser,
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
+ MODEL_META_DATA_SECTION_HEADER : MetaFileParser._SectionHeaderParser,
+ MODEL_META_DATA_SUBSECTION_HEADER : _SubsectionHeaderParser,
+ }
+
+ _Macros = property(_GetMacros)
+
+## DEC file parser class
+#
+# @param FilePath The path of package description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+#
+class DecParser(MetaFileParser):
+ # DEC file supported data types (one type per section)
+ DataType = {
+ TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+ TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+ }
+
+ ## Constructor of DecParser
+ #
+ # Initialize object of DecParser
+ #
+ # @param FilePath The path of package description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ #
+ def __init__(self, FilePath, FileType, Table):
+ # prevent re-initialization
+ if hasattr(self, "_Table"):
+ return
+ MetaFileParser.__init__(self, FilePath, FileType, Table)
+ self._Comments = []
+ self._Version = 0x00010005 # Only EDK2 dec file is supported
+ self.TblFile = EccGlobalData.gDb.TblFile
+ self.FileID = -1
+
+ self._CurrentStructurePcdName = ""
+ self._include_flag = False
+ self._package_flag = False
+
+ ## Parser starter
+ def Start(self):
+ Content = ''
+ try:
+ Content = open(str(self.MetaFile), 'r').readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(self.MetaFile)
+ FileID = self.TblFile.GetFileId(Filename)
+ if FileID:
+ self.FileID = FileID
+ else:
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
+
+ for Index in range(0, len(Content)):
+ Line, Comment = CleanString2(Content[Index])
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # save comment for later use
+ if Comment:
+ self._Comments.append((Comment, self._LineIndex+1))
+ # skip empty line
+ if Line == '':
+ continue
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionHeaderParser()
+ self._Comments = []
+ continue
+ elif len(self._SectionType) == 0:
+ self._Comments = []
+ continue
+
+ # section content
+ self._ValueList = ['', '', '']
+ self._SectionParser[self._SectionType[0]](self)
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
+ self._ItemType = -1
+ self._Comments = []
+ continue
+
+ #
+ # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
+ # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
+ #
+ for Arch, ModuleType, Type in self._Scope:
+ self._LastItem = self._Store(
+ Type,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ self._Owner[-1],
+ self.FileID,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ 0
+ )
+ for Comment, LineNo in self._Comments:
+ self._Store(
+ MODEL_META_DATA_COMMENT,
+ Comment,
+ self._ValueList[0],
+ self._ValueList[1],
+ Arch,
+ ModuleType,
+ self._LastItem,
+ self.FileID,
+ LineNo,
+ -1,
+ LineNo,
+ -1,
+ 0
+ )
+ self._Comments = []
+ self._Done()
+
+ def _GetApplicableSectionMacro(self):
+ Macros = {}
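+ # scopes are merged from the least specific to the most specific, so a
+ # macro defined for (S1, S2) overrides the same name defined for
+ # ("COMMON", "COMMON")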
+ for S1, S2, SectionType in self._Scope:
+ for Scope1, Scope2 in [("COMMON", "COMMON"), ("COMMON", S2), (S1, "COMMON"), (S1, S2)]:
+ if (SectionType, Scope1, Scope2) in self._SectionsMacroDict:
+ Macros.update(self._SectionsMacroDict[(SectionType, Scope1, Scope2)])
+ return Macros
+
+ ## Section header parser
+ #
+ # The section header is always in the following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
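+ # For example (illustrative headers, not taken from this file):
+ #
+ # [Guids] -> scope ('COMMON', 'COMMON')
+ # [PcdsFixedAtBuild.IA32] -> scope ('IA32', 'COMMON')
+ # [PcdsFixedAtBuild.common, PcdsDynamic.common] -> one section, two PCD types
+ #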
+ def _SectionHeaderParser(self):
+ self._Scope = []
+ self._SectionName = ''
+ self._SectionType = []
+ ArchList = set()
+ for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+ if Item == '':
+ continue
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+
+ # different types of PCD are permissible in one section
+ self._SectionName = ItemList[0].upper()
+ if self._SectionName in self.DataType:
+ if self.DataType[self._SectionName] not in self._SectionType:
+ self._SectionType.append(self.DataType[self._SectionName])
+ else:
+ EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ continue
+
+ if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
+ File=self.MetaFile,
+ Line=self._LineIndex+1,
+ ExtraData=self._CurrentLine
+ )
+ # S1 is always Arch
+ if len(ItemList) > 1:
+ S1 = ItemList[1].upper()
+ else:
+ S1 = 'COMMON'
+ ArchList.add(S1)
+ # S2 may be Platform or ModuleType
+ if len(ItemList) > 2:
+ S2 = ItemList[2].upper()
+ else:
+ S2 = 'COMMON'
+ if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
+ self._Scope.append([S1, S2, self.DataType[self._SectionName]])
+
+ # 'COMMON' must not be used with specific ARCHs in the same section
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+
+ ## [guids], [ppis] and [protocols] section parser
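+ # A typical line handled here looks like (illustrative example, not from
+ # the source):
+ #
+ # gEfiSampleGuid = {0x12345678, 0x1234, 0x5678, {0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77}}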
+ @ParseMacro
+ def _GuidParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
+ ExtraData=self._CurrentLine + \
+ " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0] = TokenList[0]
+ #Parse the Guid value format
+ GuidValueList = TokenList[1].strip(' {}').split(',')
+ Index = 0
+ HexList = []
+ if len(GuidValueList) == 11:
+ for GuidValue in GuidValueList:
+ GuidValue = GuidValue.strip()
+ if GuidValue.startswith('0x') or GuidValue.startswith('0X'):
+ HexList.append('0x' + str(GuidValue[2:]))
+ Index += 1
+ continue
+ else:
+ if GuidValue.startswith('{'):
+ GuidValue = GuidValue.lstrip(' {')
+ HexList.append('0x' + str(GuidValue[2:]))
+ Index += 1
+ self._ValueList[1] = "{ %s, %s, %s, { %s, %s, %s, %s, %s, %s, %s, %s }}" % (HexList[0], HexList[1], HexList[2], HexList[3], HexList[4], HexList[5], HexList[6], HexList[7], HexList[8], HexList[9], HexList[10])
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
+ ExtraData=self._CurrentLine + \
+ " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0] = ''
+
+ def ParsePcdName(self, namelist):
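+ # For illustration (examples not from the source), an array PCD name is
+ # reshaped as follows:
+ # ["Guid", "Pcd[0]"] -> ["Guid", "Pcd", "[0]"]
+ # ["Guid", "Pcd[0]", "Field"] -> ["Guid", "Pcd", "[0].Field"]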
+ if "[" in namelist[1]:
+ pcdname = namelist[1][:namelist[1].index("[")]
+ arrayindex = namelist[1][namelist[1].index("["):]
+ namelist[1] = pcdname
+ if len(namelist) == 2:
+ namelist.append(arrayindex)
+ else:
+ namelist[2] = ".".join((arrayindex,namelist[2]))
+ return namelist
+
+ def StructPcdParser(self):
+ self._ValueList[0] = self._CurrentStructurePcdName
+
+ if "|" not in self._CurrentLine:
+ if "<HeaderFiles>" == self._CurrentLine:
+ self._include_flag = True
+ self._package_flag = False
+ self._ValueList = None
+ return
+ if "<Packages>" == self._CurrentLine:
+ self._package_flag = True
+ self._ValueList = None
+ self._include_flag = False
+ return
+
+ if self._include_flag:
+ self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
+ self._ValueList[2] = self._CurrentLine
+ if self._package_flag and "}" != self._CurrentLine:
+ self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
+ self._ValueList[2] = self._CurrentLine
+ if self._CurrentLine == "}":
+ self._package_flag = False
+ self._include_flag = False
+ self._ValueList = None
+ else:
+ PcdTokens = self._CurrentLine.split(TAB_VALUE_SPLIT)
+ PcdNames = self.ParsePcdName(PcdTokens[0].split(TAB_SPLIT))
+ if len(PcdNames) == 2:
+ if PcdNames[1].strip().endswith("]"):
+ PcdName = PcdNames[1][:PcdNames[1].index('[')]
+ Index = PcdNames[1][PcdNames[1].index('['):]
+ self._ValueList[0] = TAB_SPLIT.join((PcdNames[0], PcdName))
+ self._ValueList[1] = Index
+ self._ValueList[2] = PcdTokens[1]
+ else:
+ self._CurrentStructurePcdName = ""
+ else:
+ if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]):
+ EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (
+ self._CurrentStructurePcdName, TAB_SPLIT.join(PcdNames[:2])),
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:])
+ self._ValueList[2] = PcdTokens[1]
+
+ ## PCD sections parser
+ #
+ # [PcdsFixedAtBuild]
+ # [PcdsPatchableInModule]
+ # [PcdsFeatureFlag]
+ # [PcdsDynamicEx]
+ # [PcdsDynamic]
+ #
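+ # A well-formed entry handled below looks like (illustrative example):
+ #
+ # gSampleTokenSpaceGuid.PcdSample|0x1|UINT32|0x00010001
+ #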
+ @ParseMacro
+ def _PcdParser(self):
+ if self._CurrentStructurePcdName:
+ self.StructPcdParser()
+ return
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ # check PCD information
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check PCD datum information
+ if len(TokenList) < 2 or TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+
+
+ ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
+ PtrValue = ValueRe.findall(TokenList[1])
+
+ # Has VOID* type string, may contain "|" character in the string.
+ if len(PtrValue) != 0:
+ ptrValueList = re.sub(ValueRe, '', TokenList[1])
+ ValueList = GetSplitValueList(ptrValueList)
+ ValueList[0] = PtrValue[0]
+ else:
+ ValueList = GetSplitValueList(TokenList[1])
+
+
+ # check if there's enough datum information given
+ if len(ValueList) != 3:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check default value
+ if ValueList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check datum type
+ if ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check token of the PCD
+ if ValueList[2] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check format of default value against the datum type
+ IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
+ if not IsValid:
+ EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if Cause == "StructurePcd":
+ self._CurrentStructurePcdName = TAB_SPLIT.join(self._ValueList[0:2])
+ self._ValueList[0] = self._CurrentStructurePcdName
+ self._ValueList[1] = ValueList[1].strip()
+
+ if EccGlobalData.gConfig.UniCheckPCDInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # check Description, Prompt information
+ PatternDesc = re.compile(r'##\s*([\x21-\x7E\s]*)', re.S)
+ PatternPrompt = re.compile(r'#\s+@Prompt\s+([\x21-\x7E\s]*)', re.S)
+ Description = None
+ Prompt = None
+ # check @ValidRange, @ValidList and @Expression format valid
+ ErrorCodeValid = '0x0 <= %s <= 0xFFFFFFFF'
+ PatternValidRangeIn = r'(NOT)?\s*(\d+\s*-\s*\d+|0[xX][a-fA-F0-9]+\s*-\s*0[xX][a-fA-F0-9]+|LT\s*\d+|LT\s*0[xX][a-fA-F0-9]+|GT\s*\d+|GT\s*0[xX][a-fA-F0-9]+|LE\s*\d+|LE\s*0[xX][a-fA-F0-9]+|GE\s*\d+|GE\s*0[xX][a-fA-F0-9]+|XOR\s*\d+|XOR\s*0[xX][a-fA-F0-9]+|EQ\s*\d+|EQ\s*0[xX][a-fA-F0-9]+)'
+ PatternValidRng = re.compile('^' + r'(NOT)?\s*' + PatternValidRangeIn + '$')
+ for Comment in self._Comments:
+ Comm = Comment[0].strip()
+ if not Comm:
+ continue
+ if not Description:
+ Description = PatternDesc.findall(Comm)
+ if not Prompt:
+ Prompt = PatternPrompt.findall(Comm)
+ if Comm[0] == '#':
+ ValidFormt = Comm.lstrip('#')
+ ValidFormt = ValidFormt.lstrip()
+ if ValidFormt[0:11] == '@ValidRange':
+ ValidFormt = ValidFormt[11:]
+ ValidFormt = ValidFormt.lstrip()
+ try:
+ ErrorCode, Expression = ValidFormt.split('|', 1)
+ except ValueError:
+ ErrorCode = '0x0'
+ Expression = ValidFormt
+ ErrorCode, Expression = ErrorCode.strip(), Expression.strip()
+ try:
+ if not eval(ErrorCodeValid % ErrorCode):
+ EdkLogger.warn('Parser', '@ValidRange ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
+ except:
+ EdkLogger.warn('Parser', '@ValidRange ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
+ if not PatternValidRng.search(Expression):
+ EdkLogger.warn('Parser', '@ValidRange Expression(%s) of PCD %s is incorrect format.' % (Expression, TokenList[0]))
+ if ValidFormt[0:10] == '@ValidList':
+ ValidFormt = ValidFormt[10:]
+ ValidFormt = ValidFormt.lstrip()
+ try:
+ ErrorCode, Expression = ValidFormt.split('|', 1)
+ except ValueError:
+ ErrorCode = '0x0'
+ Expression = ValidFormt
+ ErrorCode, Expression = ErrorCode.strip(), Expression.strip()
+ try:
+ if not eval(ErrorCodeValid % ErrorCode):
+ EdkLogger.warn('Parser', '@ValidList ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
+ except:
+ EdkLogger.warn('Parser', '@ValidList ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
+ Values = Expression.split(',')
+ for Value in Values:
+ Value = Value.strip()
+ try:
+ eval(Value)
+ except:
+ EdkLogger.warn('Parser', '@ValidList Expression of PCD %s includes an invalid value (%s).' % (TokenList[0], Value))
+ break
+ if ValidFormt[0:11] == '@Expression':
+ ValidFormt = ValidFormt[11:]
+ ValidFormt = ValidFormt.lstrip()
+ try:
+ ErrorCode, Expression = ValidFormt.split('|', 1)
+ except ValueError:
+ ErrorCode = '0x0'
+ Expression = ValidFormt
+ ErrorCode, Expression = ErrorCode.strip(), Expression.strip()
+ try:
+ if not eval(ErrorCodeValid % ErrorCode):
+ EdkLogger.warn('Parser', '@Expression ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
+ except:
+ EdkLogger.warn('Parser', '@Expression ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
+ if not Expression:
+ EdkLogger.warn('Parser', '@Expression Expression of PCD %s is incorrect format.' % TokenList[0])
+ if not Description:
+ EdkLogger.warn('Parser', 'PCD %s Description information is not provided.' % TokenList[0])
+ if not Prompt:
+ EdkLogger.warn('Parser', 'PCD %s Prompt information is not provided.' % TokenList[0])
+ # check Description, Prompt localization information
+ if self._UniObj:
+ self._UniObj.CheckPcdInfo(TokenList[0])
+
+ if ValueList[0] in ['True', 'true', 'TRUE']:
+ ValueList[0] = '1'
+ elif ValueList[0] in ['False', 'false', 'FALSE']:
+ ValueList[0] = '0'
+
+ self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip()
+
+ _SectionParser = {
+ MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
+ MODEL_EFI_INCLUDE : MetaFileParser._PathParser,
+ MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
+ MODEL_EFI_GUID : _GuidParser,
+ MODEL_EFI_PPI : _GuidParser,
+ MODEL_EFI_PROTOCOL : _GuidParser,
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX : _PcdParser,
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
+ }
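+
+ # Minimal usage sketch (illustrative, not from the source; "DecFilePath" and
+ # "DecTable" are hypothetical placeholders, and the ECC database behind
+ # EccGlobalData.gDb is assumed to be initialized already):
+ #
+ # Parser = DecParser(DecFilePath, MODEL_FILE_DEC, DecTable)
+ # Parser.Start()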
+
+
+## Fdf
+#
+# This class defines the structure used in the Fdf object
+#
+# @param Filename: Input value for filename of the Fdf file, default is None
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+class Fdf(object):
+ def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
+ self.WorkspaceDir = WorkspaceDir
+ self.IsToDatabase = IsToDatabase
+
+ self.Cur = Database.Cur
+ self.TblFile = Database.TblFile
+ self.TblFdf = Database.TblFdf
+ self.FileID = -1
+ self.FileList = {}
+
+ #
+ # Load Fdf file if filename is not None
+ #
+ if Filename is not None:
+ try:
+ self.LoadFdfFile(Filename)
+ except Exception:
+ pass
+
+ #
+ # Insert a FDF file record into database
+ #
+ def InsertFile(self, Filename):
+ FileID = -1
+ Filename = NormPath(Filename)
+ if Filename not in self.FileList:
+ FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_FDF)
+ self.FileList[Filename] = FileID
+
+ return self.FileList[Filename]
+
+
+ ## Load Fdf file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Fdf file
+ #
+ def LoadFdfFile(self, Filename):
+ FileList = []
+ #
+ # Parse Fdf file
+ #
+ Filename = NormPath(Filename)
+ Fdf = FdfParser(Filename)
+ Fdf.ParseFile()
+
+ #
+ # Insert inf file and pcd information
+ #
+ if self.IsToDatabase:
+ (Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled) = \
+ (0, '', '', '', 'COMMON', 'COMMON', -1, -1, -1, -1, -1, -1, 0)
+ for Key in Fdf.Profile.PcdDict.keys():
+ Model = MODEL_PCD
+ Value1 = Key[1]
+ Value2 = Key[0]
+ FileName = Fdf.Profile.PcdFileLineDict[Key][0]
+ StartLine = Fdf.Profile.PcdFileLineDict[Key][1]
+ BelongsToFile = self.InsertFile(FileName)
+ self.TblFdf.Insert(Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+ for Index in range(0, len(Fdf.Profile.InfList)):
+ Model = MODEL_META_DATA_COMPONENT
+ Value1 = Fdf.Profile.InfList[Index]
+ Value2 = ''
+ FileName = Fdf.Profile.InfFileLineList[Index][0]
+ StartLine = Fdf.Profile.InfFileLineList[Index][1]
+ BelongsToFile = self.InsertFile(FileName)
+ self.TblFdf.Insert(Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+
+class UniParser(object):
+ # IsExtraUni defines whether the UNI file is a Module UNI or an extra Module UNI
+ # IsModuleUni defines whether the UNI file is a Module UNI or a Package UNI
+ def __init__(self, FilePath, IsExtraUni=False, IsModuleUni=True):
+ self.FilePath = FilePath
+ self.FileName = os.path.basename(FilePath)
+ self.IsExtraUni = IsExtraUni
+ self.IsModuleUni = IsModuleUni
+ self.FileIn = None
+ self.Missing = []
+ self.__read()
+
+ def __read(self):
+ # try the encodings in turn; the original chained "except" clauses on a
+ # single "try" could never catch errors raised inside an earlier
+ # handler, so the UTF-16 fallbacks were unreachable
+ self.FileIn = ""
+ for Encoding in ('utf_8', 'utf_16', 'utf_16_le'):
+ try:
+ self.FileIn = CodecOpenLongFilePath(self.FilePath, Mode='rb', Encoding=Encoding).read()
+ break
+ except UnicodeError:
+ continue
+ except IOError:
+ break
+
+ def Start(self):
+ if self.IsModuleUni:
+ if self.IsExtraUni:
+ ModuleName = self.CheckKeyValid('STR_PROPERTIES_MODULE_NAME')
+ self.PrintLog('STR_PROPERTIES_MODULE_NAME', ModuleName)
+ else:
+ ModuleAbstract = self.CheckKeyValid('STR_MODULE_ABSTRACT')
+ self.PrintLog('STR_MODULE_ABSTRACT', ModuleAbstract)
+ ModuleDescription = self.CheckKeyValid('STR_MODULE_DESCRIPTION')
+ self.PrintLog('STR_MODULE_DESCRIPTION', ModuleDescription)
+ else:
+ if self.IsExtraUni:
+ PackageName = self.CheckKeyValid('STR_PROPERTIES_PACKAGE_NAME')
+ self.PrintLog('STR_PROPERTIES_PACKAGE_NAME', PackageName)
+ else:
+ PackageAbstract = self.CheckKeyValid('STR_PACKAGE_ABSTRACT')
+ self.PrintLog('STR_PACKAGE_ABSTRACT', PackageAbstract)
+ PackageDescription = self.CheckKeyValid('STR_PACKAGE_DESCRIPTION')
+ self.PrintLog('STR_PACKAGE_DESCRIPTION', PackageDescription)
+
+ def CheckKeyValid(self, Key, Contents=None):
+ if not Contents:
+ Contents = self.FileIn
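+ # the pattern below matches UNI entries of the form (illustrative example):
+ # #string STR_MODULE_ABSTRACT #language en-US "Sample abstract"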
+ KeyPattern = re.compile(r'#string\s+%s\s+.*?#language.*?".*?"' % Key, re.S)
+ if KeyPattern.search(Contents):
+ return True
+ return False
+
+ def CheckPcdInfo(self, PcdCName):
+ PromptKey = 'STR_%s_PROMPT' % PcdCName.replace('.', '_')
+ PcdPrompt = self.CheckKeyValid(PromptKey)
+ self.PrintLog(PromptKey, PcdPrompt)
+ HelpKey = 'STR_%s_HELP' % PcdCName.replace('.', '_')
+ PcdHelp = self.CheckKeyValid(HelpKey)
+ self.PrintLog(HelpKey, PcdHelp)
+
+ def PrintLog(self, Key, Value):
+ if not Value and Key not in self.Missing:
+ Msg = '%s is missing in the %s file.' % (Key, self.FileName)
+ EdkLogger.warn('Parser', Msg)
+ EccGlobalData.gDb.TblReport.Insert(EccToolError.ERROR_GENERAL_CHECK_UNI_HELP_INFO, OtherMsg=Msg, BelongsToTable='File', BelongsToItem=-2)
+ self.Missing.append(Key)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
new file mode 100755
index 00000000..efe721fd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
@@ -0,0 +1,329 @@
+## @file
+# This file is used to create/update/query/erase a meta file table
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import uuid
+
+import Common.EdkLogger as EdkLogger
+import Ecc.EccGlobalData as EccGlobalData
+
+from Ecc.MetaFileWorkspace.MetaDataTable import Table
+from Ecc.MetaFileWorkspace.MetaDataTable import ConvertToSqlString
+from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \
+ MODEL_FILE_OTHERS
+
+class MetaFileTable(Table):
+ ## Constructor
+ def __init__(self, Cursor, MetaFile, FileType, TableName, Temporary = False):
+ self.MetaFile = MetaFile
+ self.TblFile = EccGlobalData.gDb.TblFile
+ if (FileType == MODEL_FILE_INF):
+ TableName = "Inf"
+ if (FileType == MODEL_FILE_DSC):
+ if Temporary:
+ TableName = "_%s_%s" % ("Dsc", uuid.uuid4().hex)
+ else:
+ TableName = "Dsc"
+ if (FileType == MODEL_FILE_DEC):
+ TableName = "Dec"
+
+ Table.__init__(self, Cursor, TableName, 0, Temporary)
+ self.Create(False)
+
+
+## Python class representation of table storing module data
+class ModuleTable(MetaFileTable):
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Usage TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor):
+ MetaFileTable.__init__(self, Cursor, '', MODEL_FILE_INF, "Inf", False)
+
+ ## Insert a record into table Inf
+ #
+ # @param Model: Model of an Inf item
+ # @param Value1: Value1 of an Inf item
+ # @param Value2: Value2 of an Inf item
+ # @param Value3: Value3 of an Inf item
+ # @param Scope1: Arch of an Inf item
+ # @param Scope2: Platform of an Inf item
+ # @param BelongsToItem: The item that this item belongs to
+ # @param StartLine: StartLine of an Inf item
+ # @param StartColumn: StartColumn of an Inf item
+ # @param EndLine: EndLine of an Inf item
+ # @param EndColumn: EndColumn of an Inf item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
+ BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0, Usage=''):
+ (Value1, Value2, Value3, Usage, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Usage, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Usage,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ BelongsToFile,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ # @param Platform: The Platform attribute of Record
+ #
+ # @retval: A recordSet of all found records
+ #
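+ # For example (illustrative, not from the source), Query(MODEL_EFI_LIBRARY_CLASS,
+ # Arch='IA32', Platform='MyPlatform') issues roughly:
+ #
+ # SELECT Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine FROM Inf
+ # WHERE Model=<model> AND Enabled>=0
+ # AND (Scope1='IA32' OR Scope1='COMMON')
+ # AND (Scope2='MyPlatform' OR Scope2='COMMON' OR Scope2='DEFAULT')
+ #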
+ def Query(self, Model, Arch=None, Platform=None):
+ ConditionString = "Model=%s AND Enabled>=0" % Model
+ ValueString = "Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine"
+
+ if Arch is not None and Arch != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
+ if Platform is not None and Platform != 'COMMON':
+ ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
+## Python class representation of table storing package data
+class PackageTable(MetaFileTable):
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor):
+ MetaFileTable.__init__(self, Cursor, '', MODEL_FILE_DEC, "Dec", False)
+
+ ## Insert table
+ #
+ # Insert a record into table Dec
+ #
+ # @param Model: Model of a Dec item
+ # @param Value1: Value1 of a Dec item
+ # @param Value2: Value2 of a Dec item
+ # @param Value3: Value3 of a Dec item
+ # @param Scope1: Arch of a Dec item
+ # @param Scope2: Module type of a Dec item
+ # @param BelongsToItem: The item that this item belongs to
+ # @param StartLine: StartLine of a Dec item
+ # @param StartColumn: StartColumn of a Dec item
+ # @param EndLine: EndLine of a Dec item
+ # @param EndColumn: EndColumn of a Dec item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
+ BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ BelongsToFile,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Arch=None):
+ ConditionString = "Model=%s AND Enabled>=0" % Model
+ ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"
+
+ if Arch is not None and Arch != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
+## Python class representation of table storing platform data
+class PlatformTable(MetaFileTable):
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ FromItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor, MetaFile = '', FileType = MODEL_FILE_DSC, Temporary = False):
+ MetaFileTable.__init__(self, Cursor, MetaFile, FileType, "Dsc", Temporary)
+
+ ## Insert table
+ #
+ # Insert a record into table Dsc
+ #
+ # @param Model: Model of a Dsc item
+ # @param Value1: Value1 of a Dsc item
+ # @param Value2: Value2 of a Dsc item
+ # @param Value3: Value3 of a Dsc item
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: The item that this item belongs to
+ # @param FromItem: The DSC file that this item comes from
+ # @param StartLine: StartLine of a Dsc item
+ # @param StartColumn: StartColumn of a Dsc item
+ # @param EndLine: EndLine of a Dsc item
+ # @param EndColumn: EndColumn of a Dsc item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON', BelongsToItem=-1, BelongsToFile = -1,
+ FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ BelongsToFile,
+ FromItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: The item that this item belongs to
+ # @param FromItem: The DSC file that this item comes from
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
+ ConditionString = "Model=%s AND Enabled>0" % Model
+ ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
+
+ if Scope1 is not None and Scope1 != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
+ if Scope2 is not None and Scope2 != 'COMMON':
+ ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
+
+ if BelongsToItem is not None:
+ ConditionString += " AND BelongsToItem=%s" % BelongsToItem
+ else:
+ ConditionString += " AND BelongsToItem<0"
+
+ if FromItem is not None:
+ ConditionString += " AND FromItem=%s" % FromItem
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
+## Factory class to produce different storage for different types of meta-files
+class MetaFileStorage(object):
+ _FILE_TABLE_ = {
+ MODEL_FILE_INF : ModuleTable,
+ MODEL_FILE_DEC : PackageTable,
+ MODEL_FILE_DSC : PlatformTable,
+ MODEL_FILE_OTHERS : MetaFileTable,
+ }
+
+ _FILE_TYPE_ = {
+ ".inf" : MODEL_FILE_INF,
+ ".dec" : MODEL_FILE_DEC,
+ ".dsc" : MODEL_FILE_DSC,
+ }
+
+ ## Constructor
+ def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False):
+ # no type given, try to find one
+ if not FileType:
+ if MetaFile.Type in Class._FILE_TYPE_: # __new__ has no "self"; use the class
+ FileType = Class._FILE_TYPE_[MetaFile.Type]
+ else:
+ FileType = MODEL_FILE_OTHERS
+
+ # the argument tuple is identical for every table type
+ Args = (Cursor, MetaFile, FileType, Temporary)
+
+ # create the storage object and return it to caller
+ return Class._FILE_TABLE_[FileType](*Args)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py
new file mode 100644
index 00000000..4183055e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Workspace' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing a package.
+#
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/ParserWarning.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/ParserWarning.py
new file mode 100755
index 00000000..121d8581
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/ParserWarning.py
@@ -0,0 +1,24 @@
+## @file
+# This file defines the warning class of the ECC tool
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## The exception class that is used to report error messages during preprocessing
+#
+# Currently the "ToolName" is set to be "ECC PP".
+#
+class Warning (Exception):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param File The FDF name
+# @param Line The line number where the error occurs
+ #
+ def __init__(self, Str, File = None, Line = None):
+ self.message = Str
+ self.FileName = File
+ self.LineNumber = Line
+ self.ToolName = 'ECC PP'
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
new file mode 100755
index 00000000..ca2b9c1c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
@@ -0,0 +1,225 @@
+## @file
+# This is an XML API that uses a syntax similar to XPath, but it is written in
+# standard python so that no extra python packages are required to use it.
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import xml.dom.minidom
+import codecs
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+## Create an XML element
+#
+# @param Name Tag name of the new element
+# @param String Text content of the new element
+# @param NodeList List of child nodes, or [Key, Value] pairs for child text elements
+# @param AttributeList List of [Key, Value] attribute pairs
+#
+# @retval Element The created XML element
+#
+def CreateXmlElement(Name, String, NodeList, AttributeList):
+ Doc = xml.dom.minidom.Document()
+ Element = Doc.createElement(Name)
+ if String != '' and String is not None:
+ Element.appendChild(Doc.createTextNode(String))
+
+ for Item in NodeList:
+ if isinstance(Item, type([])):
+ Key = Item[0]
+ Value = Item[1]
+ if Key != '' and Key is not None and Value != '' and Value is not None:
+ Node = Doc.createElement(Key)
+ Node.appendChild(Doc.createTextNode(Value))
+ Element.appendChild(Node)
+ else:
+ Element.appendChild(Item)
+ for Item in AttributeList:
+ Key = Item[0]
+ Value = Item[1]
+ if Key != '' and Key is not None and Value != '' and Value is not None:
+ Element.setAttribute(Key, Value)
+
+ return Element
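+
+# Example (illustrative, not from the source):
+# CreateXmlElement('Pkg', 'text', [['Name', 'X']], [['id', '1']])
+# returns an element serializing as <Pkg id="1">text<Name>X</Name></Pkg>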
+
+## Get a list of XML nodes using XPath style syntax.
+#
+# Return a list of XML DOM nodes from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty list is returned.
+#
+# @param Dom The root XML DOM node.
+# @param String An XPath-style path.
+#
+# @retval Nodes A list of XML nodes matching the XPath-style String.
+#
+def XmlList(Dom, String):
+ if String is None or String == "" or Dom is None or Dom == "":
+ return []
+ if Dom.nodeType == Dom.DOCUMENT_NODE:
+ Dom = Dom.documentElement
+ if String[0] == "/":
+ String = String[1:]
+ TagList = String.split('/')
+ Nodes = [Dom]
+ Index = 0
+ End = len(TagList) - 1
+ while Index <= End:
+ ChildNodes = []
+ for Node in Nodes:
+ if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == TagList[Index]:
+ if Index < End:
+ ChildNodes.extend(Node.childNodes)
+ else:
+ ChildNodes.append(Node)
+ Nodes = ChildNodes
+ ChildNodes = []
+ Index += 1
+
+ return Nodes
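+
+# For example (illustrative), given a document <root><a><b/></a></root>,
+# XmlList(Dom, '/root/a/b') returns the single <b> node in a list, while
+# XmlList(Dom, '/root/x') returns [].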
+
+
+## Get a single XML node using XPath style syntax.
+#
+# Return a single XML DOM node from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM node.
+# @param String An XPath-style path.
+#
+# @retval Node A single XML node matching the XPath-style String.
+#
+def XmlNode(Dom, String):
+ if String is None or String == "" or Dom is None or Dom == "":
+ return ""
+ if Dom.nodeType == Dom.DOCUMENT_NODE:
+ Dom = Dom.documentElement
+ if String[0] == "/":
+ String = String[1:]
+ TagList = String.split('/')
+ Index = 0
+ End = len(TagList) - 1
+ ChildNodes = [Dom]
+ while Index <= End:
+ for Node in ChildNodes:
+ if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == TagList[Index]:
+ if Index < End:
+ ChildNodes = Node.childNodes
+ else:
+ return Node
+ break
+ Index += 1
+ return ""
+
+
+## Get a single XML element using XPath style syntax.
+#
+# Return a single XML element from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+# @param String An XPath-style path.
+#
+# @retval Element The stripped text of the XML element matching the XPath-style String.
+#
+def XmlElement(Dom, String):
+ try:
+ return XmlNode(Dom, String).firstChild.data.strip()
+ except:
+ return ""
+
+
+## Get a single XML element of the current node.
+#
+# Return a single XML element specified by the current root Dom.
+# If the input Dom is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+#
+# @retval Element The stripped text data of the current root Dom.
+#
+def XmlElementData(Dom):
+ try:
+ return Dom.firstChild.data.strip()
+ except:
+ return ""
+
+
+## Get a list of XML elements using XPath style syntax.
+#
+# Return a list of XML elements from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty list is returned.
+#
+# @param Dom The root XML DOM object.
+# @param String An XPath-style path.
+#
+# @retval Elements A list of XML elements matching the XPath-style String.
+#
+def XmlElementList(Dom, String):
+ # build a real list so callers are not handed a lazy map object on Python 3
+ return [XmlElementData(Node) for Node in XmlList(Dom, String)]
+
+
+## Get the XML attribute of the current node.
+#
+# Return a single XML attribute named Attribute from the current root Dom.
+# If the input Dom or Attribute is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+# @param Attribute The name of Attribute.
+#
+# @retval Attribute The stripped attribute value, or '' when absent.
+#
+def XmlAttribute(Dom, Attribute):
+ try:
+ return Dom.getAttribute(Attribute).strip()
+ except:
+ return ''
+
+
+## Get the XML node name of the current node.
+#
+# Return a single XML node name from the current root Dom.
+# If the input Dom is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+#
+# @retval NodeName The stripped node name of the root Dom.
+#
+def XmlNodeName(Dom):
+ try:
+ return Dom.nodeName.strip()
+ except:
+ return ''
+
+## Parse an XML file.
+#
+# Parse the input XML file named FileName and return the XML DOM it represents.
+# If the input File is not a valid XML file, then an empty string is returned.
+#
+# @param FileName The XML file name.
+#
+# @retval Dom The DOM object obtained from the XML file.
+#
+def XmlParseFile(FileName):
+ try:
+ XmlFile = codecs.open(FileName,encoding='utf_8_sig')
+ Dom = xml.dom.minidom.parse(XmlFile)
+ XmlFile.close()
+ return Dom
+ except Exception as X:
+ print(X)
+ return ""
+
+# This acts like the main() function for the script, unless it is 'import'ed
+# into another script.
+if __name__ == '__main__':
+ # Nothing to do here. Could do some unit tests.
+ A = CreateXmlElement('AAA', 'CCC', [['AAA', '111'], ['BBB', '222']], [['A', '1'], ['B', '2']])
+ B = CreateXmlElement('ZZZ', 'CCC', [['XXX', '111'], ['YYY', '222']], [['A', '1'], ['B', '2']])
+ C = CreateXmlElement('DDD', 'EEE', [A, B], [['FFF', 'GGG']])
+ print(C.toprettyxml(indent = " "))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/__init__.py
new file mode 100644
index 00000000..a800a619
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/Xml/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Xml' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Xml
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/__init__.py
new file mode 100644
index 00000000..25cb86ed
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Ecc' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing a package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/c.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/c.py
new file mode 100755
index 00000000..9c9c7dd0
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/c.py
@@ -0,0 +1,2654 @@
+## @file
+# This file implements the C coding style checking of the ECC tool
+#
+# Copyright (c) 2009 - 2019, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, Arm Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+from __future__ import absolute_import
+import sys
+import Common.LongFilePathOs as os
+import re
+import string
+from Ecc import CodeFragmentCollector
+from Ecc import FileProfile
+from CommonDataClass import DataClass
+from Ecc import Database
+from Common import EdkLogger
+from Ecc.EccToolError import *
+from Ecc import EccGlobalData
+from Ecc import MetaDataParser
+
+IncludeFileListDict = {}
+AllIncludeFileListDict = {}
+IncludePathListDict = {}
+ComplexTypeDict = {}
+SUDict = {}
+IgnoredKeywordList = ['EFI_ERROR']
+
+def GetIgnoredDirListPattern():
+ skipList = list(EccGlobalData.gConfig.SkipDirList) + ['.svn']
+ DirString = '|'.join(skipList)
+ p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % DirString)
+ return p
+
+def GetFuncDeclPattern():
+ p = re.compile(r'(?:EFIAPI|EFI_BOOT_SERVICE|EFI_RUNTIME_SERVICE)?\s*[_\w]+\s*\(.*\)$', re.DOTALL)
+ return p
+
+def GetArrayPattern():
+ p = re.compile(r'[_\w]*\s*[\[.*\]]+')
+ return p
+
+def GetTypedefFuncPointerPattern():
+ p = re.compile(r'[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
+ return p
+
+def GetDB():
+ return EccGlobalData.gDb
+
+def GetConfig():
+ return EccGlobalData.gConfig
+
+def PrintErrorMsg(ErrorType, Msg, TableName, ItemId):
+ Msg = Msg.replace('\n', '').replace('\r', '')
+ MsgPartList = Msg.split()
+ Msg = ''
+ for Part in MsgPartList:
+ Msg += Part
+ Msg += ' '
+ GetDB().TblReport.Insert(ErrorType, OtherMsg=Msg, BelongsToTable=TableName, BelongsToItem=ItemId)
+
+def GetIdType(Str):
+ Type = DataClass.MODEL_UNKNOWN
+ Str = Str.replace('#', '# ')
+ List = Str.split()
+ if len(List) < 2:
+ pass
+ elif List[1] == 'include':
+ Type = DataClass.MODEL_IDENTIFIER_INCLUDE
+ elif List[1] == 'define':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_DEFINE
+ elif List[1] == 'ifdef':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_IFDEF
+ elif List[1] == 'ifndef':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF
+ elif List[1] == 'endif':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_ENDIF
+ elif List[1] == 'pragma':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_PROGMA
+ else:
+ Type = DataClass.MODEL_UNKNOWN
+ return Type
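+
+# For example (illustrative), GetIdType('#include <Uefi.h>') returns
+# DataClass.MODEL_IDENTIFIER_INCLUDE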
+
+def SuOccurInTypedef (Su, TdList):
+ for Td in TdList:
+ if Su.StartPos[0] == Td.StartPos[0] and Su.EndPos[0] == Td.EndPos[0]:
+ return True
+ return False
+
+def GetIdentifierList():
+ IdList = []
+ for comment in FileProfile.CommentList:
+ IdComment = DataClass.IdentifierClass(-1, '', '', '', comment.Content, DataClass.MODEL_IDENTIFIER_COMMENT, -1, -1, comment.StartPos[0], comment.StartPos[1], comment.EndPos[0], comment.EndPos[1])
+ IdList.append(IdComment)
+
+ for pp in FileProfile.PPDirectiveList:
+ Type = GetIdType(pp.Content)
+ IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0], pp.StartPos[1], pp.EndPos[0], pp.EndPos[1])
+ IdList.append(IdPP)
+
+ for pe in FileProfile.PredicateExpressionList:
+ IdPE = DataClass.IdentifierClass(-1, '', '', '', pe.Content, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION, -1, -1, pe.StartPos[0], pe.StartPos[1], pe.EndPos[0], pe.EndPos[1])
+ IdList.append(IdPE)
+
+ FuncDeclPattern = GetFuncDeclPattern()
+ ArrayPattern = GetArrayPattern()
+ for var in FileProfile.VariableDeclarationList:
+ DeclText = var.Declarator.lstrip()
+ FuncPointerPattern = GetTypedefFuncPointerPattern()
+ if FuncPointerPattern.match(DeclText):
+ continue
+ VarNameStartLine = var.NameStartPos[0]
+ VarNameStartColumn = var.NameStartPos[1]
+ FirstChar = DeclText[0]
+ while not FirstChar.isalpha() and FirstChar != '_':
+ if FirstChar == '*':
+ var.Modifier += '*'
+ VarNameStartColumn += 1
+ DeclText = DeclText.lstrip('*')
+ elif FirstChar == '\r':
+ DeclText = DeclText.lstrip('\r\n').lstrip('\r')
+ VarNameStartLine += 1
+ VarNameStartColumn = 0
+ elif FirstChar == '\n':
+ DeclText = DeclText.lstrip('\n')
+ VarNameStartLine += 1
+ VarNameStartColumn = 0
+ elif FirstChar == ' ':
+ DeclText = DeclText.lstrip(' ')
+ VarNameStartColumn += 1
+ elif FirstChar == '\t':
+ DeclText = DeclText.lstrip('\t')
+ VarNameStartColumn += 8
+ else:
+ DeclText = DeclText[1:]
+ VarNameStartColumn += 1
+ FirstChar = DeclText[0]
+
+ var.Declarator = DeclText
+ if FuncDeclPattern.match(var.Declarator):
+ DeclSplitList = var.Declarator.split('(')
+ FuncName = DeclSplitList[0].strip()
+ FuncNamePartList = FuncName.split()
+ if len(FuncNamePartList) > 1:
+ FuncName = FuncNamePartList[-1].strip()
+ NameStart = DeclSplitList[0].rfind(FuncName)
+ var.Declarator = var.Declarator[NameStart:]
+ if NameStart > 0:
+ var.Modifier += ' ' + DeclSplitList[0][0:NameStart]
+ Index = 0
+ PreChar = ''
+ while Index < NameStart:
+ FirstChar = DeclSplitList[0][Index]
+ if DeclSplitList[0][Index:].startswith('EFIAPI'):
+ Index += 6
+ VarNameStartColumn += 6
+ PreChar = ''
+ continue
+ elif FirstChar == '\r':
+ Index += 1
+ VarNameStartLine += 1
+ VarNameStartColumn = 0
+ elif FirstChar == '\n':
+ Index += 1
+ if PreChar != '\r':
+ VarNameStartLine += 1
+ VarNameStartColumn = 0
+ elif FirstChar == ' ':
+ Index += 1
+ VarNameStartColumn += 1
+ elif FirstChar == '\t':
+ Index += 1
+ VarNameStartColumn += 8
+ else:
+ Index += 1
+ VarNameStartColumn += 1
+ PreChar = FirstChar
+ IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, FuncName, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
+ IdList.append(IdVar)
+ continue
+
+ if var.Declarator.find('{') == -1:
+ for decl in var.Declarator.split(','):
+ DeclList = decl.split('=')
+ Name = DeclList[0].strip()
+ if ArrayPattern.match(Name):
+ LSBPos = var.Declarator.find('[')
+ var.Modifier += ' ' + Name[LSBPos:]
+ Name = Name[0:LSBPos]
+
+ IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
+ IdList.append(IdVar)
+ else:
+ DeclList = var.Declarator.split('=')
+ Name = DeclList[0].strip()
+ if ArrayPattern.match(Name):
+ LSBPos = var.Declarator.find('[')
+ var.Modifier += ' ' + Name[LSBPos:]
+ Name = Name[0:LSBPos]
+ IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
+ IdList.append(IdVar)
+
+ for enum in FileProfile.EnumerationDefinitionList:
+ LBPos = enum.Content.find('{')
+ RBPos = enum.Content.find('}')
+ Name = enum.Content[4:LBPos].strip()
+ Value = enum.Content[LBPos + 1:RBPos]
+ IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0], enum.StartPos[1], enum.EndPos[0], enum.EndPos[1])
+ IdList.append(IdEnum)
+
+ for su in FileProfile.StructUnionDefinitionList:
+ if SuOccurInTypedef(su, FileProfile.TypedefDefinitionList):
+ continue
+ Type = DataClass.MODEL_IDENTIFIER_STRUCTURE
+ SkipLen = 6
+ if su.Content.startswith('union'):
+ Type = DataClass.MODEL_IDENTIFIER_UNION
+ SkipLen = 5
+ LBPos = su.Content.find('{')
+ RBPos = su.Content.find('}')
+ if LBPos == -1 or RBPos == -1:
+ Name = su.Content[SkipLen:].strip()
+ Value = ''
+ else:
+ Name = su.Content[SkipLen:LBPos].strip()
+ Value = su.Content[LBPos:RBPos + 1]
+ IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0], su.StartPos[1], su.EndPos[0], su.EndPos[1])
+ IdList.append(IdPE)
+
+ TdFuncPointerPattern = GetTypedefFuncPointerPattern()
+ for td in FileProfile.TypedefDefinitionList:
+ Modifier = ''
+ Name = td.ToType
+ Value = td.FromType
+ if TdFuncPointerPattern.match(td.ToType):
+ Modifier = td.FromType
+ LBPos = td.ToType.find('(')
+ TmpStr = td.ToType[LBPos + 1:].strip()
+ StarPos = TmpStr.find('*')
+ if StarPos != -1:
+ Modifier += ' ' + TmpStr[0:StarPos]
+ while TmpStr[StarPos] == '*':
+# Modifier += ' ' + '*'
+ StarPos += 1
+ TmpStr = TmpStr[StarPos:].strip()
+ RBPos = TmpStr.find(')')
+ Name = TmpStr[0:RBPos]
+ Value = 'FP' + TmpStr[RBPos + 1:]
+ else:
+ while Name.startswith('*'):
+ Value += ' ' + '*'
+ Name = Name.lstrip('*').strip()
+
+ if Name.find('[') != -1:
+ LBPos = Name.find('[')
+ RBPos = Name.rfind(']')
+ Value += Name[LBPos : RBPos + 1]
+ Name = Name[0 : LBPos]
+
+ IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0], td.StartPos[1], td.EndPos[0], td.EndPos[1])
+ IdList.append(IdTd)
+
+ for funcCall in FileProfile.FunctionCallingList:
+ IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0], funcCall.StartPos[1], funcCall.EndPos[0], funcCall.EndPos[1])
+ IdList.append(IdFC)
+ return IdList
+
+def StripNonAlnumChars(Str):
+ StrippedStr = ''
+ for Char in Str:
+ if Char.isalnum() or Char == '_':
+ StrippedStr += Char
+ return StrippedStr
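+
+# Example (illustrative): StripNonAlnumChars('*Ptr->Name') returns 'PtrName'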
+
+def GetParamList(FuncDeclarator, FuncNameLine=0, FuncNameOffset=0):
+ FuncDeclarator = StripComments(FuncDeclarator)
+ ParamIdList = []
+ #DeclSplitList = FuncDeclarator.split('(')
+ LBPos = FuncDeclarator.find('(')
+ #if len(DeclSplitList) < 2:
+ if LBPos == -1:
+ return ParamIdList
+ #FuncName = DeclSplitList[0]
+ FuncName = FuncDeclarator[0:LBPos]
+ #ParamStr = DeclSplitList[1].rstrip(')')
+ ParamStr = FuncDeclarator[LBPos + 1:].rstrip(')')
+ LineSkipped = 0
+ OffsetSkipped = 0
+ TailChar = FuncName[-1]
+ while not TailChar.isalpha() and TailChar != '_':
+
+ if TailChar == '\n':
+ FuncName = FuncName.rstrip('\r\n').rstrip('\n')
+ LineSkipped += 1
+ OffsetSkipped = 0
+ elif TailChar == '\r':
+ FuncName = FuncName.rstrip('\r')
+ LineSkipped += 1
+ OffsetSkipped = 0
+ elif TailChar == ' ':
+ FuncName = FuncName.rstrip(' ')
+ OffsetSkipped += 1
+ elif TailChar == '\t':
+ FuncName = FuncName.rstrip('\t')
+ OffsetSkipped += 8
+ else:
+ FuncName = FuncName[:-1]
+ TailChar = FuncName[-1]
+
+ OffsetSkipped += 1 #skip '('
+
+ for p in ParamStr.split(','):
+ ListP = p.split()
+ if len(ListP) == 0:
+ continue
+ ParamName = ListP[-1]
+ DeclText = ParamName.strip()
+ RightSpacePos = p.rfind(ParamName)
+ ParamModifier = p[0:RightSpacePos]
+ if ParamName == 'OPTIONAL':
+ if ParamModifier == '':
+ ParamModifier += ' ' + 'OPTIONAL'
+ DeclText = ''
+ else:
+ ParamName = ListP[-2]
+ DeclText = ParamName.strip()
+ RightSpacePos = p.rfind(ParamName)
+ ParamModifier = p[0:RightSpacePos]
+ ParamModifier += 'OPTIONAL'
+ while DeclText.startswith('*'):
+ ParamModifier += ' ' + '*'
+ DeclText = DeclText.lstrip('*').strip()
+ ParamName = DeclText
+ # ignore the array length if it exists
+ LBIndex = ParamName.find('[')
+ if LBIndex != -1:
+ ParamName = ParamName[0:LBIndex]
+
+ Start = RightSpacePos
+ Index = 0
+ PreChar = ''
+ while Index < Start:
+ FirstChar = p[Index]
+
+ if FirstChar == '\r':
+ Index += 1
+ LineSkipped += 1
+ OffsetSkipped = 0
+ elif FirstChar == '\n':
+ Index += 1
+ if PreChar != '\r':
+ LineSkipped += 1
+ OffsetSkipped = 0
+ elif FirstChar == ' ':
+ Index += 1
+ OffsetSkipped += 1
+ elif FirstChar == '\t':
+ Index += 1
+ OffsetSkipped += 8
+ else:
+ Index += 1
+ OffsetSkipped += 1
+ PreChar = FirstChar
+
+ ParamBeginLine = FuncNameLine + LineSkipped
+ ParamBeginOffset = FuncNameOffset + OffsetSkipped
+
+ Index = Start + len(ParamName)
+ PreChar = ''
+ while Index < len(p):
+ FirstChar = p[Index]
+
+ if FirstChar == '\r':
+ Index += 1
+ LineSkipped += 1
+ OffsetSkipped = 0
+ elif FirstChar == '\n':
+ Index += 1
+ if PreChar != '\r':
+ LineSkipped += 1
+ OffsetSkipped = 0
+ elif FirstChar == ' ':
+ Index += 1
+ OffsetSkipped += 1
+ elif FirstChar == '\t':
+ Index += 1
+ OffsetSkipped += 8
+ else:
+ Index += 1
+ OffsetSkipped += 1
+ PreChar = FirstChar
+
+ ParamEndLine = FuncNameLine + LineSkipped
+ ParamEndOffset = FuncNameOffset + OffsetSkipped
+ if ParamName != '...':
+ ParamName = StripNonAlnumChars(ParamName)
+ IdParam = DataClass.IdentifierClass(-1, ParamModifier, '', ParamName, '', DataClass.MODEL_IDENTIFIER_PARAMETER, -1, -1, ParamBeginLine, ParamBeginOffset, ParamEndLine, ParamEndOffset)
+ ParamIdList.append(IdParam)
+
+ OffsetSkipped += 1 #skip ','
+
+ return ParamIdList
+
+def GetFunctionList():
+ FuncObjList = []
+ for FuncDef in FileProfile.FunctionDefinitionList:
+ ParamIdList = []
+ DeclText = FuncDef.Declarator.lstrip()
+ FuncNameStartLine = FuncDef.NamePos[0]
+ FuncNameStartColumn = FuncDef.NamePos[1]
+ FirstChar = DeclText[0]
+ while not FirstChar.isalpha() and FirstChar != '_':
+ if FirstChar == '*':
+ FuncDef.Modifier += '*'
+ FuncNameStartColumn += 1
+ DeclText = DeclText.lstrip('*')
+ elif FirstChar == '\r':
+ DeclText = DeclText.lstrip('\r\n').lstrip('\r')
+ FuncNameStartLine += 1
+ FuncNameStartColumn = 0
+ elif FirstChar == '\n':
+ DeclText = DeclText.lstrip('\n')
+ FuncNameStartLine += 1
+ FuncNameStartColumn = 0
+ elif FirstChar == ' ':
+ DeclText = DeclText.lstrip(' ')
+ FuncNameStartColumn += 1
+ elif FirstChar == '\t':
+ DeclText = DeclText.lstrip('\t')
+ FuncNameStartColumn += 8
+ else:
+ DeclText = DeclText[1:]
+ FuncNameStartColumn += 1
+ FirstChar = DeclText[0]
+
+ FuncDef.Declarator = DeclText
+ DeclSplitList = FuncDef.Declarator.split('(')
+ if len(DeclSplitList) < 2:
+ continue
+
+ FuncName = DeclSplitList[0]
+ FuncNamePartList = FuncName.split()
+ if len(FuncNamePartList) > 1:
+ FuncName = FuncNamePartList[-1]
+ NameStart = DeclSplitList[0].rfind(FuncName)
+ if NameStart > 0:
+ FuncDef.Modifier += ' ' + DeclSplitList[0][0:NameStart]
+ Index = 0
+ PreChar = ''
+ while Index < NameStart:
+ FirstChar = DeclSplitList[0][Index]
+ if DeclSplitList[0][Index:].startswith('EFIAPI'):
+ Index += 6
+ FuncNameStartColumn += 6
+ PreChar = ''
+ continue
+ elif FirstChar == '\r':
+ Index += 1
+ FuncNameStartLine += 1
+ FuncNameStartColumn = 0
+ elif FirstChar == '\n':
+ Index += 1
+ if PreChar != '\r':
+ FuncNameStartLine += 1
+ FuncNameStartColumn = 0
+ elif FirstChar == ' ':
+ Index += 1
+ FuncNameStartColumn += 1
+ elif FirstChar == '\t':
+ Index += 1
+ FuncNameStartColumn += 8
+ else:
+ Index += 1
+ FuncNameStartColumn += 1
+ PreChar = FirstChar
+
+ FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0], FuncDef.StartPos[1], FuncDef.EndPos[0], FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, [], FuncNameStartLine, FuncNameStartColumn)
+ FuncObjList.append(FuncObj)
+
+ return FuncObjList
+
+def GetFileModificationTimeFromDB(FullFileName):
+ TimeValue = 0.0
+ Db = GetDB()
+ SqlStatement = """ select TimeStamp
+ from File
+ where FullPath = \'%s\'
+ """ % (FullFileName)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ TimeValue = Result[0]
+ return TimeValue
+
+def CollectSourceCodeDataIntoDB(RootDir):
+ FileObjList = []
+ WalkTree = os.walk(RootDir) # avoid shadowing the built-in "tuple"
+ IgnoredPattern = GetIgnoredDirListPattern()
+ ParseErrorFileList = []
+ TokenReleaceList = EccGlobalData.gConfig.TokenReleaceList
+ TokenReleaceList.extend(['L",\\\""'])
+
+ for dirpath, dirnames, filenames in WalkTree:
+ if IgnoredPattern.match(dirpath.upper()):
+ continue
+
+ for Dir in dirnames:
+ Dirname = os.path.join(dirpath, Dir)
+ if os.path.islink(Dirname):
+ Dirname = os.path.realpath(Dirname)
+ if os.path.isdir(Dirname):
+ # symlinks to directories are treated as directories
+ dirnames.remove(Dir)
+ dirnames.append(Dirname)
+
+ for f in filenames:
+ if f.lower() in EccGlobalData.gConfig.SkipFileList:
+ continue
+ collector = None
+ FullName = os.path.normpath(os.path.join(dirpath, f))
+ model = DataClass.MODEL_FILE_OTHERS
+ if os.path.splitext(f)[1] in ('.h', '.c'):
+ EdkLogger.info("Parsing " + FullName)
+ model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
+ collector = CodeFragmentCollector.CodeFragmentCollector(FullName)
+ collector.TokenReleaceList = TokenReleaceList
+ try:
+ collector.ParseFile()
+ except UnicodeError:
+ ParseErrorFileList.append(FullName)
+ collector.CleanFileProfileBuffer()
+ collector.ParseFileWithClearedPPDirective()
+# collector.PrintFragments()
+ BaseName = os.path.basename(f)
+ DirName = os.path.dirname(FullName)
+ Ext = os.path.splitext(f)[1].lstrip('.')
+ ModifiedTime = os.path.getmtime(FullName)
+ FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
+ FileObjList.append(FileObj)
+ if collector:
+ collector.CleanFileProfileBuffer()
+
+ if len(ParseErrorFileList) > 0:
+ EdkLogger.info("Found unrecoverable error during parsing:\n\t%s\n" % "\n\t".join(ParseErrorFileList))
+
+ Db = GetDB()
+ for file in FileObjList:
+ if file.ExtName.upper() not in ['INF', 'DEC', 'DSC', 'FDF']:
+ Db.InsertOneFile(file)
+
+ Db.UpdateIdentifierBelongsToFunction()
+
+def GetTableID(FullFileName, ErrorMsgList=None):
+ if ErrorMsgList is None:
+ ErrorMsgList = []
+
+ Db = GetDB()
+ SqlStatement = """ select ID
+ from File
+ where FullPath like '%s'
+ """ % FullFileName
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ FileID = -1
+ for Result in ResultSet:
+ if FileID != -1:
+ ErrorMsgList.append('Duplicate file ID found in DB for file %s' % FullFileName)
+ return -2
+ FileID = Result[0]
+ if FileID == -1:
+ ErrorMsgList.append('NO file ID found in DB for file %s' % FullFileName)
+ return -1
+ return FileID
+
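+# Illustrative return convention: a non-negative ID names the per-file
+# 'Identifier<ID>' table used throughout this module, -1 means no DB row for
+# the path, and -2 flags duplicate rows for the same path.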
+def GetIncludeFileList(FullFileName):
+ # ('.H',) is a one-element tuple; a bare string would make 'not in' a substring test
+ if os.path.splitext(FullFileName)[1].upper() not in ('.H',):
+ return []
+ IFList = IncludeFileListDict.get(FullFileName)
+ if IFList is not None:
+ return IFList
+
+ FileID = GetTableID(FullFileName)
+ if FileID < 0:
+ return []
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_INCLUDE)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ IncludeFileListDict[FullFileName] = ResultSet
+ return ResultSet
+
+def GetFullPathOfIncludeFile(Str, IncludePathList):
+ for IncludePath in IncludePathList:
+ FullPath = os.path.join(IncludePath, Str)
+ FullPath = os.path.normpath(FullPath)
+ if os.path.exists(FullPath):
+ return FullPath
+ return None
+
+def GetAllIncludeFiles(FullFileName):
+ if AllIncludeFileListDict.get(FullFileName) is not None:
+ return AllIncludeFileListDict.get(FullFileName)
+
+ FileDirName = os.path.dirname(FullFileName)
+ IncludePathList = IncludePathListDict.get(FileDirName)
+ if IncludePathList is None:
+ IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())
+ if FileDirName not in IncludePathList:
+ IncludePathList.insert(0, FileDirName)
+ IncludePathListDict[FileDirName] = IncludePathList
+ IncludeFileQueue = []
+ for IncludeFile in GetIncludeFileList(FullFileName):
+ FileName = IncludeFile[0].lstrip('#').strip()
+ FileName = FileName.lstrip('include').strip()
+ FileName = FileName.strip('\"')
+ FileName = FileName.lstrip('<').rstrip('>').strip()
+ FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
+ if FullPath is not None:
+ IncludeFileQueue.append(FullPath)
+
+ i = 0
+ while i < len(IncludeFileQueue):
+ for IncludeFile in GetIncludeFileList(IncludeFileQueue[i]):
+ FileName = IncludeFile[0].lstrip('#').strip()
+ FileName = FileName.lstrip('include').strip()
+ FileName = FileName.strip('\"')
+ FileName = FileName.lstrip('<').rstrip('>').strip()
+ FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
+ if FullPath is not None and FullPath not in IncludeFileQueue:
+ IncludeFileQueue.insert(i + 1, FullPath)
+ i += 1
+
+ AllIncludeFileListDict[FullFileName] = IncludeFileQueue
+ return IncludeFileQueue
+
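+# Illustrative expansion (hypothetical files): if A.c includes B.h and B.h
+# includes C.h, GetAllIncludeFiles for A.c returns the resolved full paths of
+# B.h and C.h; newly discovered headers are inserted right after their
+# includer, so nested includes stay grouped with the file that pulls them in.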
+def GetPredicateListFromPredicateExpStr(PES):
+
+ PredicateList = []
+ i = 0
+ PredicateBegin = 0
+ #PredicateEnd = 0
+ LogicOpPos = -1
+ p = GetFuncDeclPattern()
+ while i < len(PES) - 1:
+ if (PES[i].isalnum() or PES[i] == '_' or PES[i] == '*') and LogicOpPos > PredicateBegin:
+ PredicateBegin = i
+ if (PES[i] == '&' and PES[i + 1] == '&') or (PES[i] == '|' and PES[i + 1] == '|'):
+ LogicOpPos = i
+ Exp = PES[PredicateBegin:i].strip()
+ # Exp may contain '.' or '->'
+ TmpExp = Exp.replace('.', '').replace('->', '')
+ if p.match(TmpExp):
+ PredicateList.append(Exp)
+ else:
+ PredicateList.append(Exp.rstrip(';').rstrip(')').strip())
+ i += 1
+
+ if PredicateBegin > LogicOpPos:
+ while PredicateBegin < len(PES):
+ if PES[PredicateBegin].isalnum() or PES[PredicateBegin] == '_' or PES[PredicateBegin] == '*':
+ break
+ PredicateBegin += 1
+ Exp = PES[PredicateBegin:len(PES)].strip()
+ # Exp may contain '.' or '->'
+ TmpExp = Exp.replace('.', '').replace('->', '')
+ if p.match(TmpExp):
+ PredicateList.append(Exp)
+ else:
+ PredicateList.append(Exp.rstrip(';').rstrip(')').strip())
+ return PredicateList
+
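+# Illustrative split (not from the original source): for the predicate
+# expression string 'Ptr != NULL && IsOk(x)' this returns roughly
+# ['Ptr != NULL', 'IsOk(x)']: the text between '&&'/'||' operators, with
+# trailing ')' and ';' stripped from predicates that are not function calls.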
+def GetCNameList(Lvalue, StarList=None):
+ # default to None so callers do not share one mutable default list
+ if StarList is None:
+ StarList = []
+ Lvalue += ' '
+ i = 0
+ SearchBegin = 0
+ VarStart = -1
+ VarEnd = -1
+ VarList = []
+
+ while SearchBegin < len(Lvalue):
+ while i < len(Lvalue):
+ if Lvalue[i].isalnum() or Lvalue[i] == '_':
+ if VarStart == -1:
+ VarStart = i
+ VarEnd = i
+ i += 1
+ elif VarEnd != -1:
+ VarList.append(Lvalue[VarStart:VarEnd + 1])
+ i += 1
+ break
+ else:
+ if VarStart == -1 and Lvalue[i] == '*':
+ StarList.append('*')
+ i += 1
+ if VarEnd == -1:
+ break
+
+
+ DotIndex = Lvalue[VarEnd:].find('.')
+ ArrowIndex = Lvalue[VarEnd:].find('->')
+ if DotIndex == -1 and ArrowIndex == -1:
+ break
+ elif DotIndex == -1 and ArrowIndex != -1:
+ SearchBegin = VarEnd + ArrowIndex
+ elif ArrowIndex == -1 and DotIndex != -1:
+ SearchBegin = VarEnd + DotIndex
+ else:
+ SearchBegin = VarEnd + min(DotIndex, ArrowIndex)
+
+ i = SearchBegin
+ VarStart = -1
+ VarEnd = -1
+
+ return VarList
+
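+# Illustrative call (not from the original source):
+#   Stars = []
+#   GetCNameList('*Ptr->Field', Stars)   # -> ['Ptr', 'Field'], Stars == ['*']
+# It collects the identifiers along a '.'/'->' chain and records any leading
+# '*' dereferences in StarList.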
+def SplitPredicateByOp(Str, Op, IsFuncCalling=False):
+
+ Name = Str.strip()
+ Value = None
+
+ if IsFuncCalling:
+ Index = 0
+ LBFound = False
+ UnmatchedLBCount = 0
+ while Index < len(Str):
+ while not LBFound and Str[Index] != '_' and not Str[Index].isalnum():
+ Index += 1
+
+ while not LBFound and (Str[Index].isalnum() or Str[Index] == '_'):
+ Index += 1
+ # there may be a type cast at the beginning; skip it.
+ RemainingStr = Str[Index:].lstrip()
+ if RemainingStr.startswith(')') and not LBFound:
+ Index += 1
+ continue
+
+ if RemainingStr.startswith('(') and not LBFound:
+ LBFound = True
+
+ if Str[Index] == '(':
+ UnmatchedLBCount += 1
+ Index += 1
+ continue
+
+ if Str[Index] == ')':
+ UnmatchedLBCount -= 1
+ Index += 1
+ if UnmatchedLBCount == 0:
+ break
+ continue
+
+ Index += 1
+
+ if UnmatchedLBCount > 0:
+ return [Name]
+
+ IndexInRemainingStr = Str[Index:].find(Op)
+ if IndexInRemainingStr == -1:
+ return [Name]
+
+ Name = Str[0:Index + IndexInRemainingStr].strip()
+ Value = Str[Index + IndexInRemainingStr + len(Op):].strip().strip(')')
+ return [Name, Value]
+
+ TmpStr = Str.rstrip(';').rstrip(')')
+ while True:
+ Index = TmpStr.rfind(Op)
+ if Index == -1:
+ return [Name]
+
+ if Str[Index - 1].isalnum() or Str[Index - 1].isspace() or Str[Index - 1] == ')' or Str[Index - 1] == ']':
+ Name = Str[0:Index].strip()
+ Value = Str[Index + len(Op):].strip()
+ return [Name, Value]
+
+ TmpStr = Str[0:Index - 1]
+
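+# Illustrative splits (not from the original source):
+#   SplitPredicateByOp('Status == EFI_SUCCESS', '==')  # -> ['Status', 'EFI_SUCCESS']
+#   SplitPredicateByOp('Status', '==')                 # -> ['Status']  (no operator)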
+def SplitPredicateStr(Str):
+
+ Str = Str.lstrip('(')
+ IsFuncCalling = False
+ p = GetFuncDeclPattern()
+ TmpStr = Str.replace('.', '').replace('->', '')
+ if p.match(TmpStr):
+ IsFuncCalling = True
+
+ # try the two-character operators first so '>=' / '<=' are not split as '>' / '<'
+ for Op in ('==', '!=', '>=', '<=', '>', '<'):
+ PredPartList = SplitPredicateByOp(Str, Op, IsFuncCalling)
+ if len(PredPartList) > 1:
+ return [PredPartList, Op]
+
+ return [[Str, None], None]
+
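+# Illustrative results (not from the original source):
+#   SplitPredicateStr('(Ptr != NULL')   # -> [['Ptr', 'NULL'], '!=']
+#   SplitPredicateStr('Done')           # -> [['Done', None], None]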
+def GetFuncContainsPE(ExpLine, ResultSet):
+ for Result in ResultSet:
+ if Result[0] < ExpLine and Result[1] > ExpLine:
+ return Result
+ return None
+
+def PatternInModifier(Modifier, SubStr):
+ # whitespace-delimited exact match of SubStr within the modifier string
+ return SubStr in Modifier.split()
+
+def GetDataTypeFromModifier(ModifierStr):
+ MList = ModifierStr.split()
+ ReturnType = ''
+ for M in MList:
+ if M in EccGlobalData.gConfig.ModifierSet:
+ continue
+ # remove array suffix
+ if M.startswith('[') or M.endswith(']'):
+ continue
+ ReturnType += M + ' '
+
+ ReturnType = ReturnType.strip()
+ if len(ReturnType) == 0:
+ ReturnType = 'VOID'
+ return ReturnType
+
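+# Illustrative call (assumes 'IN' and 'CONST' are in the configured
+# ModifierSet): GetDataTypeFromModifier('IN CONST UINT8 *') drops the
+# modifiers and returns 'UINT8 *'; an all-modifier string yields 'VOID'.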
+def DiffModifier(Str1, Str2):
+ # True when the two modifier strings differ after whitespace normalization
+ return Str1.split() != Str2.split()
+
+def GetTypedefDict(FullFileName):
+
+ Dict = ComplexTypeDict.get(FullFileName)
+ if Dict is not None:
+ return Dict
+
+ FileID = GetTableID(FullFileName)
+ FileTable = 'Identifier' + str(FileID)
+ Db = GetDB()
+ SqlStatement = """ select Modifier, Name, Value, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ Dict = {}
+ for Result in ResultSet:
+ if len(Result[0]) == 0:
+ Dict[Result[1]] = Result[2]
+
+ IncludeFileList = GetAllIncludeFiles(FullFileName)
+ for F in IncludeFileList:
+ FileID = GetTableID(F)
+ if FileID < 0:
+ continue
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, Name, Value, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ if not Result[2].startswith('FP ('):
+ Dict[Result[1]] = Result[2]
+ else:
+ if len(Result[0]) == 0:
+ Dict[Result[1]] = 'VOID'
+ else:
+ Dict[Result[1]] = GetDataTypeFromModifier(Result[0])
+
+ ComplexTypeDict[FullFileName] = Dict
+ return Dict
+
+def GetSUDict(FullFileName):
+
+ Dict = SUDict.get(FullFileName)
+ if Dict is not None:
+ return Dict
+
+ FileID = GetTableID(FullFileName)
+ FileTable = 'Identifier' + str(FileID)
+ Db = GetDB()
+ SqlStatement = """ select Name, Value, ID
+ from %s
+ where Model = %d or Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_STRUCTURE, DataClass.MODEL_IDENTIFIER_UNION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ Dict = {}
+ for Result in ResultSet:
+ if len(Result[1]) > 0:
+ Dict[Result[0]] = Result[1]
+
+ IncludeFileList = GetAllIncludeFiles(FullFileName)
+ for F in IncludeFileList:
+ FileID = GetTableID(F)
+ if FileID < 0:
+ continue
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Name, Value, ID
+ from %s
+ where Model = %d or Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_STRUCTURE, DataClass.MODEL_IDENTIFIER_UNION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ if len(Result[1]) > 0:
+ Dict[Result[0]] = Result[1]
+
+ SUDict[FullFileName] = Dict
+ return Dict
+
+def StripComments(Str):
+ Str += ' '
+ ListFromStr = list(Str)
+
+ InComment = False
+ DoubleSlashComment = False
+ Index = 0
+ while Index < len(ListFromStr):
+ # meet new line, then no longer in a comment for //
+ if ListFromStr[Index] == '\n':
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ Index += 1
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and ListFromStr[Index] == '*' and ListFromStr[Index + 1] == '/':
+ ListFromStr[Index] = ' '
+ Index += 1
+ ListFromStr[Index] = ' '
+ Index += 1
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ ListFromStr[Index] = ' '
+ Index += 1
+ # check for // comment
+ elif ListFromStr[Index] == '/' and ListFromStr[Index + 1] == '/':
+ InComment = True
+ DoubleSlashComment = True
+
+ # check for /* comment start
+ elif ListFromStr[Index] == '/' and ListFromStr[Index + 1] == '*':
+ ListFromStr[Index] = ' '
+ Index += 1
+ ListFromStr[Index] = ' '
+ Index += 1
+ InComment = True
+ else:
+ Index += 1
+
+ # restore from List to String
+ Str = "".join(ListFromStr)
+ Str = Str.rstrip(' ')
+
+ return Str
+
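+# Illustrative call (not from the original source): comment bytes are blanked
+# with spaces instead of removed, so column positions of the remaining code
+# are preserved, e.g. StripComments('A; /* note */ B; // tail') returns
+# 'A;' followed by twelve spaces and 'B;' (both comments turned into blanks).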
+def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
+ Value = TypedefDict.get(Type)
+ if Value is None:
+ Value = SUDict.get(Type)
+ if Value is None:
+ return None
+
+ LBPos = Value.find('{')
+ while LBPos == -1:
+ FTList = Value.split()
+ for FT in FTList:
+ if FT not in ('struct', 'union'):
+ Value = TypedefDict.get(FT)
+ if Value is None:
+ Value = SUDict.get(FT)
+ break
+
+ if Value is None:
+ return None
+
+ LBPos = Value.find('{')
+
+# RBPos = Value.find('}')
+ Fields = Value[LBPos + 1:]
+ Fields = StripComments(Fields)
+ FieldsList = Fields.split(';')
+ for Field in FieldsList:
+ Field = Field.strip()
+ Index = Field.rfind(FieldName)
+ if Index < 1:
+ continue
+ if not Field[Index - 1].isalnum():
+ if Index + len(FieldName) == len(Field):
+ Type = GetDataTypeFromModifier(Field[0:Index])
+ return Type.strip()
+ else:
+ # For the condition that the field in struct is an array with [] suffixes...
+ if not Field[Index + len(FieldName)].isalnum():
+ Type = GetDataTypeFromModifier(Field[0:Index])
+ return Type.strip()
+
+ return None
+
+def GetRealType(Type, TypedefDict, TargetType=None):
+ if TargetType is not None and Type == TargetType:
+ return Type
+ while TypedefDict.get(Type):
+ Type = TypedefDict.get(Type)
+ if TargetType is not None and Type == TargetType:
+ return Type
+ return Type
+
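+# Illustrative resolution (hypothetical typedef chain): with
+#   TypedefDict = {'EFI_STATUS': 'RETURN_STATUS', 'RETURN_STATUS': 'UINTN'}
+# GetRealType('EFI_STATUS', TypedefDict) follows the chain to 'UINTN', while
+# GetRealType('EFI_STATUS', TypedefDict, 'RETURN_STATUS') stops at the
+# requested TargetType and returns 'RETURN_STATUS'.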
+def GetTypeInfo(RefList, Modifier, FullFileName, TargetType=None):
+ TypedefDict = GetTypedefDict(FullFileName)
+ SUDict = GetSUDict(FullFileName)
+ Type = GetDataTypeFromModifier(Modifier).replace('*', '').strip()
+
+ Type = Type.split()[-1]
+ Index = 0
+ while Index < len(RefList):
+ FieldName = RefList[Index]
+ FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)
+ if FromType is None:
+ return None
+ # we want to determine the exact type.
+ if TargetType is not None:
+ Type = FromType.split()[0]
+ # we only want to check if it is a pointer
+ else:
+ Type = FromType
+ if Type.find('*') != -1 and Index == len(RefList) - 1:
+ return Type
+ Type = FromType.split()[0]
+
+ Index += 1
+
+ Type = GetRealType(Type, TypedefDict, TargetType)
+
+ return Type
+
+def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetType=None, StarList=None):
+
+ PredVar = PredVarList[0]
+ FileID = GetTableID(FullFileName)
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ # search variable in include files
+
+ # it is a function call, search function declarations and definitions
+ if IsFuncCall:
+ SqlStatement = """ select Modifier, ID
+ from %s
+ where Model = %d and Value = \'%s\'
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, PredVar)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ Type = GetDataTypeFromModifier(Result[0]).split()[-1]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ IncludeFileList = GetAllIncludeFiles(FullFileName)
+ for F in IncludeFileList:
+ FileID = GetTableID(F)
+ if FileID < 0:
+ continue
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, ID
+ from %s
+ where Model = %d and Value = \'%s\'
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, PredVar)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ Type = GetDataTypeFromModifier(Result[0]).split()[-1]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ FileID = GetTableID(FullFileName)
+ SqlStatement = """ select Modifier, ID
+ from Function
+ where BelongsToFile = %d and Name = \'%s\'
+ """ % (FileID, PredVar)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ Type = GetDataTypeFromModifier(Result[0]).split()[-1]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ for F in IncludeFileList:
+ FileID = GetTableID(F)
+ if FileID < 0:
+ continue
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, ID
+ from Function
+ where BelongsToFile = %d and Name = \'%s\'
+ """ % (FileID, PredVar)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ Type = GetDataTypeFromModifier(Result[0]).split()[-1]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ return None
+
+ # it is really a variable; search local variables first
+ SqlStatement = """ select Modifier, ID
+ from %s
+ where Model = %d and Name = \'%s\' and StartLine >= %d and StartLine <= %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar, FuncRecord[0], FuncRecord[1])
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ VarFound = False
+ for Result in ResultSet:
+ if len(PredVarList) > 1:
+ Type = GetTypeInfo(PredVarList[1:], Result[0], FullFileName, TargetType)
+ return Type
+ else:
+# Type = GetDataTypeFromModifier(Result[0]).split()[-1]
+ TypeList = GetDataTypeFromModifier(Result[0]).split()
+ Type = TypeList[-1]
+ if len(TypeList) > 1 and StarList is not None:
+ for Star in StarList:
+ Type = Type.strip()
+ Type = Type.rstrip(Star)
+ # Recover the base type after stripping the pointer stars.
+ if len(Type.strip()) == 0:
+ Type = TypeList[-2]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ # search function parameters second
+ ParamList = GetParamList(FuncRecord[2])
+ for Param in ParamList:
+ if Param.Name.strip() == PredVar:
+ if len(PredVarList) > 1:
+ Type = GetTypeInfo(PredVarList[1:], Param.Modifier, FullFileName, TargetType)
+ return Type
+ else:
+ TypeList = GetDataTypeFromModifier(Param.Modifier).split()
+ Type = TypeList[-1]
+ if Type == '*' and len(TypeList) >= 2:
+ Type = TypeList[-2]
+ if len(TypeList) > 1 and StarList is not None:
+ for Star in StarList:
+ Type = Type.strip()
+ Type = Type.rstrip(Star)
+ # Recover the base type after stripping the pointer stars.
+ if len(Type.strip()) == 0:
+ Type = TypeList[-2]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ # search global variable next
+ SqlStatement = """ select Modifier, ID
+ from %s
+ where Model = %d and Name = \'%s\' and BelongsToFunction = -1
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ if len(PredVarList) > 1:
+ Type = GetTypeInfo(PredVarList[1:], Result[0], FullFileName, TargetType)
+ return Type
+ else:
+ TypeList = GetDataTypeFromModifier(Result[0]).split()
+ Type = TypeList[-1]
+ if len(TypeList) > 1 and StarList is not None:
+ for Star in StarList:
+ Type = Type.strip()
+ Type = Type.rstrip(Star)
+ # Recover the base type after stripping the pointer stars.
+ if len(Type.strip()) == 0:
+ Type = TypeList[-2]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+ IncludeFileList = GetAllIncludeFiles(FullFileName)
+ for F in IncludeFileList:
+ FileID = GetTableID(F)
+ if FileID < 0:
+ continue
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, ID
+ from %s
+ where Model = %d and BelongsToFunction = -1 and Name = \'%s\'
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ if len(PredVarList) > 1:
+ Type = GetTypeInfo(PredVarList[1:], Result[0], FullFileName, TargetType)
+ return Type
+ else:
+ TypeList = GetDataTypeFromModifier(Result[0]).split()
+ Type = TypeList[-1]
+ if len(TypeList) > 1 and StarList is not None:
+ for Star in StarList:
+ Type = Type.strip()
+ Type = Type.rstrip(Star)
+ # Recover the base type after stripping the pointer stars.
+ if len(Type.strip()) == 0:
+ Type = TypeList[-2]
+ TypedefDict = GetTypedefDict(FullFileName)
+ Type = GetRealType(Type, TypedefDict, TargetType)
+ return Type
+
+def GetTypeFromArray(Type, Var):
+ Count = Var.count('[')
+
+ while Count > 0:
+ Type = Type.strip()
+ Type = Type.rstrip('*')
+ Count = Count - 1
+
+ return Type
+
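+# Illustrative call (not from the original source): every '[' in the variable
+# expression strips the trailing '*' level(s) from the type, e.g.
+#   GetTypeFromArray('UINT8 *', 'Buffer[Index]')   # -> 'UINT8'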
+def CheckFuncLayoutReturnType(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, ID, StartLine, StartColumn, EndLine, Value
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ ReturnType = GetDataTypeFromModifier(Result[0])
+ TypeStart = ReturnType.split()[0]
+ FuncName = Result[5]
+ if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, FuncName):
+ continue
+ Result0 = Result[0]
+ if Result0.upper().startswith('STATIC'):
+ Result0 = Result0[6:].strip()
+ Index = Result0.find(TypeStart)
+ if Index != 0 or Result[3] != 0:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of a line' % FuncName, FileTable, Result[1])
+
+ if Result[2] == Result[4]:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear on its own line' % FuncName, FileTable, Result[1])
+
+ SqlStatement = """ select Modifier, ID, StartLine, StartColumn, FunNameStartLine, Name
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ ReturnType = GetDataTypeFromModifier(Result[0])
+ TypeStart = ReturnType.split()[0]
+ FuncName = Result[5]
+ if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, FuncName):
+ continue
+ Result0 = Result[0]
+ if Result0.upper().startswith('STATIC'):
+ Result0 = Result0[6:].strip()
+ Index = Result0.find(TypeStart)
+ if Index != 0 or Result[3] != 0:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of a line' % FuncName, 'Function', Result[1])
+
+def CheckFuncLayoutModifier(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ ReturnType = GetDataTypeFromModifier(Result[0])
+ TypeStart = ReturnType.split()[0]
+ Result0 = Result[0]
+ if Result0.upper().startswith('STATIC'):
+ Result0 = Result0[6:].strip()
+ Index = Result0.find(TypeStart)
+ if Index != 0:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER, '', FileTable, Result[1])
+
+ SqlStatement = """ select Modifier, ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ ReturnType = GetDataTypeFromModifier(Result[0])
+ TypeStart = ReturnType.split()[0]
+ Result0 = Result[0]
+ if Result0.upper().startswith('STATIC'):
+ Result0 = Result0[6:].strip()
+ Index = Result0.find(TypeStart)
+ if Index != 0:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER, '', 'Function', Result[1])
+
+def CheckFuncLayoutName(FullFileName):
+ ErrorMsgList = []
+ # Parameter variable format pattern.
+ Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+ ParamIgnoreList = ('VOID', '...')
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Name, ID, EndColumn, Value
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ FuncName = Result[3]
+ if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, FuncName):
+ continue
+ if Result[2] != 0:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Function name [%s] should appear at the start of a line' % FuncName, FileTable, Result[1])
+ ParamList = GetParamList(Result[0])
+ if len(ParamList) == 0:
+ continue
+ StartLine = 0
+ for Param in ParamList:
+ if Param.StartLine <= StartLine:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Parameter %s should be on its own line.' % Param.Name, FileTable, Result[1])
+ if Param.StartLine - StartLine > 1:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Empty line appears before Parameter %s.' % Param.Name, FileTable, Result[1])
+ if not Pattern.match(Param.Name) and Param.Name not in ParamIgnoreList and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Param.Name):
+ PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Parameter [%s] does NOT follow naming convention.' % Param.Name, FileTable, Result[1])
+ StartLine = Param.StartLine
+
+ if not Result[0].endswith('\n )') and not Result[0].endswith('\r )'):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, '\')\' should be on a new line and indented two spaces', FileTable, Result[1])
+
+ SqlStatement = """ select Modifier, ID, FunNameStartColumn, Name
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ FuncName = Result[3]
+ if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, FuncName):
+ continue
+ if Result[2] != 0:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Function name [%s] should appear at the start of a line' % FuncName, 'Function', Result[1])
+ ParamList = GetParamList(Result[0])
+ if len(ParamList) == 0:
+ continue
+ StartLine = 0
+ for Param in ParamList:
+ if Param.StartLine <= StartLine:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Parameter %s should be on its own line.' % Param.Name, 'Function', Result[1])
+ if Param.StartLine - StartLine > 1:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Empty line appears before Parameter %s.' % Param.Name, 'Function', Result[1])
+ if not Pattern.match(Param.Name) and Param.Name not in ParamIgnoreList and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Param.Name):
+ PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Parameter [%s] does NOT follow naming convention.' % Param.Name, 'Function', Result[1])
+ StartLine = Param.StartLine
+ if not Result[0].endswith('\n )') and not Result[0].endswith('\r )'):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, '\')\' should be on a new line and indented two spaces', 'Function', Result[1])
+
+def CheckFuncLayoutPrototype(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ FileTable = 'Identifier' + str(FileID)
+ Db = GetDB()
+ SqlStatement = """ select Modifier, Header, Name, ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return ErrorMsgList
+
+ FuncDefList = []
+ for Result in ResultSet:
+ FuncDefList.append(Result)
+
+ SqlStatement = """ select Modifier, Name, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ FuncDeclList = []
+ for Result in ResultSet:
+ FuncDeclList.append(Result)
+
+ UndeclFuncList = []
+ for FuncDef in FuncDefList:
+ FuncName = FuncDef[2].strip()
+ FuncModifier = FuncDef[0]
+ FuncDefHeader = FuncDef[1]
+ for FuncDecl in FuncDeclList:
+ LBPos = FuncDecl[1].find('(')
+ DeclName = FuncDecl[1][0:LBPos].strip()
+ DeclModifier = FuncDecl[0]
+ if DeclName == FuncName:
+ if DiffModifier(FuncModifier, DeclModifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, FuncName):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, 'Function [%s] modifier different with prototype.' % FuncName, 'Function', FuncDef[3])
+ ParamListOfDef = GetParamList(FuncDefHeader)
+ ParamListOfDecl = GetParamList(FuncDecl[1])
+ if len(ParamListOfDef) != len(ParamListOfDecl) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2, FuncName):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2, 'Parameter number different in function [%s].' % FuncName, 'Function', FuncDef[3])
+ break
+
+ Index = 0
+ while Index < len(ParamListOfDef):
+ if DiffModifier(ParamListOfDef[Index].Modifier, ParamListOfDecl[Index].Modifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3, FuncName):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3, 'Parameter %s has different modifier with prototype in function [%s].' % (ParamListOfDef[Index].Name, FuncName), 'Function', FuncDef[3])
+ Index += 1
+ break
+ else:
+ UndeclFuncList.append(FuncDef)
+
+ IncludeFileList = GetAllIncludeFiles(FullFileName)
+ FuncDeclList = []
+ for F in IncludeFileList:
+ FileID = GetTableID(F, ErrorMsgList)
+ if FileID < 0:
+ continue
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, Name, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+
+ for Result in ResultSet:
+ FuncDeclList.append(Result)
+
+ for FuncDef in UndeclFuncList:
+ FuncName = FuncDef[2].strip()
+ FuncModifier = FuncDef[0]
+ FuncDefHeader = FuncDef[1]
+ for FuncDecl in FuncDeclList:
+ LBPos = FuncDecl[1].find('(')
+ DeclName = FuncDecl[1][0:LBPos].strip()
+ DeclModifier = FuncDecl[0]
+ if DeclName == FuncName:
+ if DiffModifier(FuncModifier, DeclModifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, FuncName):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, 'Function [%s] modifier different with prototype.' % FuncName, 'Function', FuncDef[3])
+ ParamListOfDef = GetParamList(FuncDefHeader)
+ ParamListOfDecl = GetParamList(FuncDecl[1])
+ if len(ParamListOfDef) != len(ParamListOfDecl) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2, FuncName):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2, 'Parameter number different in function [%s].' % FuncName, 'Function', FuncDef[3])
+ break
+
+ Index = 0
+ while Index < len(ParamListOfDef):
+ if DiffModifier(ParamListOfDef[Index].Modifier, ParamListOfDecl[Index].Modifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3, FuncName):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3, 'Parameter %s has different modifier with prototype in function [%s].' % (ParamListOfDef[Index].Name, FuncName), 'Function', FuncDef[3])
+ Index += 1
+ break
+
+def CheckFuncLayoutBody(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ FileTable = 'Identifier' + str(FileID)
+ Db = GetDB()
+ SqlStatement = """ select BodyStartColumn, EndColumn, ID, Name
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return ErrorMsgList
+ for Result in ResultSet:
+ if Result[0] != 0:
+ if not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY, Result[3]):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY,
+ 'The open brace should be at the very beginning of a line for the function [%s].' % Result[3],
+ 'Function', Result[2])
+ if Result[1] != 0:
+ if not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY, Result[3]):
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY,
+ 'The close brace should be at the very beginning of a line for the function [%s].' % Result[3],
+ 'Function', Result[2])
+
+def CheckFuncLayoutLocalVariable(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return ErrorMsgList
+ FL = []
+ for Result in ResultSet:
+ FL.append(Result)
+
+ for F in FL:
+ SqlStatement = """ select Name, Value, ID, Modifier
+ from %s
+ where Model = %d and BelongsToFunction = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, F[0])
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ continue
+
+ for Result in ResultSet:
+ if len(Result[1]) > 0 and 'CONST' not in Result[3] and 'STATIC' not in Result[3]:
+ PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE, 'Variable Name: %s' % Result[0], FileTable, Result[2])
+
+def CheckMemberVariableFormat(Name, Value, FileTable, TdId, ModelId):
+ ErrMsgList = []
+ # Member variable format pattern.
+ Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+
+ LBPos = Value.find('{')
+ RBPos = Value.rfind('}')
+ if LBPos == -1 or RBPos == -1:
+ return ErrMsgList
+
+ Fields = Value[LBPos + 1 : RBPos]
+ Fields = StripComments(Fields).strip()
+ NestPos = Fields.find('struct')
+ if NestPos != -1 and (NestPos + len('struct') < len(Fields)) and ModelId != DataClass.MODEL_IDENTIFIER_UNION:
+ if not Fields[NestPos + len('struct') + 1].isalnum():
+ if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested struct in [%s].' % (Name), FileTable, TdId)
+ return ErrMsgList
+ NestPos = Fields.find('union')
+ if NestPos != -1 and (NestPos + len('union') < len(Fields)):
+ if not Fields[NestPos + len('union') + 1].isalnum():
+ if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested union in [%s].' % (Name), FileTable, TdId)
+ return ErrMsgList
+ NestPos = Fields.find('enum')
+ if NestPos != -1 and (NestPos + len('enum') < len(Fields)):
+ if not Fields[NestPos + len('enum') + 1].isalnum():
+ if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested enum in [%s].' % (Name), FileTable, TdId)
+ return ErrMsgList
+
+ if ModelId == DataClass.MODEL_IDENTIFIER_ENUMERATE:
+ FieldsList = Fields.split(',')
+ # handle enum members that are pre-assigned a value by a function call, e.g. Name = Func(p1, p2, ...)
+ QuoteCount = 0
+ Index = 0
+ RemoveCurrentElement = False
+ while Index < len(FieldsList):
+ Field = FieldsList[Index]
+
+ if Field.find('(') != -1:
+ QuoteCount += 1
+ RemoveCurrentElement = True
+ Index += 1
+ continue
+
+ if Field.find(')') != -1 and QuoteCount > 0:
+ QuoteCount -= 1
+
+ if RemoveCurrentElement:
+ FieldsList.remove(Field)
+ if QuoteCount == 0:
+ RemoveCurrentElement = False
+ continue
+
+ if QuoteCount == 0:
+ RemoveCurrentElement = False
+
+ Index += 1
+ else:
+ FieldsList = Fields.split(';')
+
+ for Field in FieldsList:
+ Field = Field.strip()
+ if Field == '':
+ continue
+ # For the condition that the field in struct is an array with [] suffixes...
+ if Field[-1] == ']':
+ LBPos = Field.find('[')
+ Field = Field[0:LBPos]
+ # For the condition that bit field ": Number"
+ if Field.find(':') != -1:
+ ColonPos = Field.find(':')
+ Field = Field[0:ColonPos]
+
+ Field = Field.strip()
+ if Field == '':
+ continue
+ if Field.startswith("#"):
+ continue
+ # Enum could directly assign value to variable
+ Field = Field.split('=')[0].strip()
+ TokenList = Field.split()
+ # Remove pointers before variable
+ Token = TokenList[-1]
+ if Token == 'OPTIONAL':
+ Token = TokenList[-2]
+ if not Pattern.match(Token.lstrip('*')):
+ ErrMsgList.append(Token.lstrip('*'))
+
+ return ErrMsgList
+
+def CheckDeclTypedefFormat(FullFileName, ModelId):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Name, StartLine, EndLine, ID, Value
+ from %s
+ where Model = %d
+ """ % (FileTable, ModelId)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ ResultList = []
+ for Result in ResultSet:
+ ResultList.append(Result)
+
+ ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_ALL
+ if ModelId == DataClass.MODEL_IDENTIFIER_STRUCTURE:
+ ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION
+ elif ModelId == DataClass.MODEL_IDENTIFIER_ENUMERATE:
+ ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE
+ elif ModelId == DataClass.MODEL_IDENTIFIER_UNION:
+ ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE
+
+ SqlStatement = """ select Modifier, Name, Value, StartLine, EndLine, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
+ TdSet = Db.TblFile.Exec(SqlStatement)
+ TdList = []
+ for Td in TdSet:
+ TdList.append(Td)
+ # Check the member variable name format only for typedefs declared in THIS file.
+ for Td in TdList:
+ Name = Td[1].strip()
+ Value = Td[2].strip()
+ if Value.startswith('enum'):
+ ValueModelId = DataClass.MODEL_IDENTIFIER_ENUMERATE
+ elif Value.startswith('struct'):
+ ValueModelId = DataClass.MODEL_IDENTIFIER_STRUCTURE
+ elif Value.startswith('union'):
+ ValueModelId = DataClass.MODEL_IDENTIFIER_UNION
+ else:
+ continue
+
+ if ValueModelId != ModelId:
+ continue
+ # Check member variable format.
+ ErrMsgList = CheckMemberVariableFormat(Name, Value, FileTable, Td[5], ModelId)
+ for ErrMsg in ErrMsgList:
+ if EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Name + '.' + ErrMsg):
+ continue
+ PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Member variable [%s] does NOT follow naming convention.' % (Name + '.' + ErrMsg), FileTable, Td[5])
+
+ # First check in current file to see whether struct/union/enum is typedef-ed.
+ UntypedefedList = []
+ for Result in ResultList:
+ # Check member variable format.
+ Name = Result[0].strip()
+ Value = Result[4].strip()
+ if Value.startswith('enum'):
+ ValueModelId = DataClass.MODEL_IDENTIFIER_ENUMERATE
+ elif Value.startswith('struct'):
+ ValueModelId = DataClass.MODEL_IDENTIFIER_STRUCTURE
+ elif Value.startswith('union'):
+ ValueModelId = DataClass.MODEL_IDENTIFIER_UNION
+ else:
+ continue
+
+ if ValueModelId != ModelId:
+ continue
+ ErrMsgList = CheckMemberVariableFormat(Name, Value, FileTable, Result[3], ModelId)
+ for ErrMsg in ErrMsgList:
+ if EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Result[0] + '.' + ErrMsg):
+ continue
+ PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Member variable [%s] does NOT follow naming convention.' % (Result[0] + '.' + ErrMsg), FileTable, Result[3])
+ # Check whether it is typedefed.
+ Found = False
+ for Td in TdList:
+ # skip function pointer
+ if len(Td[0]) > 0:
+ continue
+ if Result[1] >= Td[3] and Td[4] >= Result[2]:
+ Found = True
+ if not Td[1].isupper():
+ PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
+ if Result[0] in Td[2].split():
+ Found = True
+ if not Td[1].isupper():
+ PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
+ if Found:
+ break
+
+ if not Found:
+ UntypedefedList.append(Result)
+ continue
+
+ if len(UntypedefedList) == 0:
+ return
+
+ IncludeFileList = GetAllIncludeFiles(FullFileName)
+ TdList = []
+ for F in IncludeFileList:
+ FileID = GetTableID(F, ErrorMsgList)
+ if FileID < 0:
+ continue
+
+ IncludeFileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, Name, Value, StartLine, EndLine, ID
+ from %s
+ where Model = %d
+ """ % (IncludeFileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ TdList.extend(ResultSet)
+
+ for Result in UntypedefedList:
+
+ # Check whether it is typedefed.
+ Found = False
+ for Td in TdList:
+
+ if len(Td[0]) > 0:
+ continue
+ if Result[1] >= Td[3] and Td[4] >= Result[2]:
+ Found = True
+ if not Td[1].isupper():
+ PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
+ if Result[0] in Td[2].split():
+ Found = True
+ if not Td[1].isupper():
+ PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
+ if Found:
+ break
+
+ if not Found:
+ PrintErrorMsg(ErrorType, 'No Typedef for %s' % Result[0], FileTable, Result[3])
+ continue
+
+def CheckDeclStructTypedef(FullFileName):
+ CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_STRUCTURE)
+
+def CheckDeclEnumTypedef(FullFileName):
+ CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_ENUMERATE)
+
+def CheckDeclUnionTypedef(FullFileName):
+ CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_UNION)
+
+def CheckDeclArgModifier(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, Name, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ ModifierTuple = ('IN', 'OUT', 'OPTIONAL', 'UNALIGNED')
+ MAX_MODIFIER_LENGTH = 100
+ for Result in ResultSet:
+ for Modifier in ModifierTuple:
+ if PatternInModifier(Result[0], Modifier) and len(Result[0]) < MAX_MODIFIER_LENGTH:
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Variable Modifier %s' % Result[0], FileTable, Result[2])
+ break
+
+ SqlStatement = """ select Modifier, Name, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ for Modifier in ModifierTuple:
+ if PatternInModifier(Result[0], Modifier):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Return Type Modifier %s' % Result[0], FileTable, Result[2])
+ break
+
+ SqlStatement = """ select Modifier, Header, ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ for Modifier in ModifierTuple:
+ if PatternInModifier(Result[0], Modifier):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Return Type Modifier %s' % Result[0], FileTable, Result[2])
+ break
+
+def CheckDeclNoUseCType(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Modifier, Name, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ CTypeTuple = ('int', 'unsigned', 'char', 'void', 'long')
+ for Result in ResultSet:
+ for Type in CTypeTuple:
+ if PatternInModifier(Result[0], Type):
+ if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE,
+ Result[0] + ' ' + Result[1]):
+ continue
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE,
+ 'Invalid variable type (%s) in definition [%s]' % (Type, Result[0] + ' ' + Result[1]),
+ FileTable,
+ Result[2])
+ break
+
+ SqlStatement = """ select Modifier, Name, ID, Value
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ ParamList = GetParamList(Result[1])
+ FuncName = Result[3]
+ if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, FuncName):
+ continue
+ for Type in CTypeTuple:
+ if PatternInModifier(Result[0], Type):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, '%s Return type %s' % (FuncName, Result[0]), FileTable, Result[2])
+
+ for Param in ParamList:
+ if PatternInModifier(Param.Modifier, Type):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Parameter %s' % Param.Name, FileTable, Result[2])
+
+ SqlStatement = """ select Modifier, Header, ID, Name
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ ParamList = GetParamList(Result[1])
+ FuncName = Result[3]
+ if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, FuncName):
+ continue
+ for Type in CTypeTuple:
+ if PatternInModifier(Result[0], Type):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, '[%s] Return type %s' % (FuncName, Result[0]), FileTable, Result[2])
+
+ for Param in ParamList:
+ if PatternInModifier(Param.Modifier, Type):
+ PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Parameter %s' % Param.Name, FileTable, Result[2])
+
+
+def CheckPointerNullComparison(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ # cache the found function return type to accelerate later checking in this file.
+ FuncReturnTypeDict = {}
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, StartLine, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return
+ PSL = []
+ for Result in ResultSet:
+ PSL.append([Result[0], Result[1], Result[2]])
+
+ SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ FL = []
+ for Result in ResultSet:
+ FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])
+
+ p = GetFuncDeclPattern()
+ for Str in PSL:
+ FuncRecord = GetFuncContainsPE(Str[1], FL)
+ if FuncRecord is None:
+ continue
+
+ for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
+ PredInfo = SplitPredicateStr(Exp)
+ if PredInfo[1] is None:
+ PredVarStr = PredInfo[0][0].strip()
+ IsFuncCall = False
+ SearchInCache = False
+ # PredVarStr may contain '.' or '->'
+ TmpStr = PredVarStr.replace('.', '').replace('->', '')
+ if p.match(TmpStr):
+ PredVarStr = PredVarStr[0:PredVarStr.find('(')]
+ SearchInCache = True
+ # Only a direct function call takes the IsFuncCall branch; a multi-level reference ending in a call is treated as a variable.
+ if TmpStr.startswith(PredVarStr):
+ IsFuncCall = True
+
+ if PredVarStr.strip() in IgnoredKeywordList:
+ continue
+ StarList = []
+ PredVarList = GetCNameList(PredVarStr, StarList)
+ # No variable found; the constant may come first, as in (0 == VarName).
+ if len(PredVarList) == 0:
+ continue
+ if SearchInCache:
+ Type = FuncReturnTypeDict.get(PredVarStr)
+ if Type is not None:
+ if Type.find('*') != -1 and Type != 'BOOLEAN*':
+ PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+ continue
+
+ if PredVarStr in FuncReturnTypeDict:
+ continue
+
+ Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)
+ if SearchInCache:
+ FuncReturnTypeDict[PredVarStr] = Type
+ if Type is None:
+ continue
+ Type = GetTypeFromArray(Type, PredVarStr)
+ if Type.find('*') != -1 and Type != 'BOOLEAN*':
+ PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+
+def CheckNonBooleanValueComparison(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ # cache the found function return type to accelerate later checking in this file.
+ FuncReturnTypeDict = {}
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, StartLine, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return
+ PSL = []
+ for Result in ResultSet:
+ PSL.append([Result[0], Result[1], Result[2]])
+
+ SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ FL = []
+ for Result in ResultSet:
+ FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])
+
+ p = GetFuncDeclPattern()
+ for Str in PSL:
+ FuncRecord = GetFuncContainsPE(Str[1], FL)
+ if FuncRecord is None:
+ continue
+
+ for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
+ PredInfo = SplitPredicateStr(Exp)
+ if PredInfo[1] is None:
+ PredVarStr = PredInfo[0][0].strip()
+ IsFuncCall = False
+ SearchInCache = False
+ # PredVarStr may contain '.' or '->'
+ TmpStr = PredVarStr.replace('.', '').replace('->', '')
+ if p.match(TmpStr):
+ PredVarStr = PredVarStr[0:PredVarStr.find('(')]
+ SearchInCache = True
+ # Only a direct function call takes the IsFuncCall branch; a multi-level reference ending in a call is treated as a variable.
+ if TmpStr.startswith(PredVarStr):
+ IsFuncCall = True
+
+ if PredVarStr.strip() in IgnoredKeywordList:
+ continue
+ StarList = []
+ PredVarList = GetCNameList(PredVarStr, StarList)
+ # No variable found; the constant may come first, as in (0 == VarName).
+ if len(PredVarList) == 0:
+ continue
+
+ if SearchInCache:
+ Type = FuncReturnTypeDict.get(PredVarStr)
+ if Type is not None:
+ if Type.find('BOOLEAN') == -1:
+ PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+ continue
+
+ if PredVarStr in FuncReturnTypeDict:
+ continue
+ Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
+ if SearchInCache:
+ FuncReturnTypeDict[PredVarStr] = Type
+ if Type is None:
+ continue
+ if Type.find('BOOLEAN') == -1:
+ PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+
+
+def CheckBooleanValueComparison(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ # cache the found function return type to accelerate later checking in this file.
+ FuncReturnTypeDict = {}
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, StartLine, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return
+ PSL = []
+ for Result in ResultSet:
+ PSL.append([Result[0], Result[1], Result[2]])
+
+ SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ FL = []
+ for Result in ResultSet:
+ FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])
+
+ p = GetFuncDeclPattern()
+ for Str in PSL:
+ FuncRecord = GetFuncContainsPE(Str[1], FL)
+ if FuncRecord is None:
+ continue
+
+ for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
+ PredInfo = SplitPredicateStr(Exp)
+ if PredInfo[1] in ('==', '!=') and PredInfo[0][1] in ('TRUE', 'FALSE'):
+ PredVarStr = PredInfo[0][0].strip()
+ IsFuncCall = False
+ SearchInCache = False
+ # PredVarStr may contain '.' or '->'
+ TmpStr = PredVarStr.replace('.', '').replace('->', '')
+ if p.match(TmpStr):
+ PredVarStr = PredVarStr[0:PredVarStr.find('(')]
+ SearchInCache = True
+ # Only a direct function call takes the IsFuncCall branch; a multi-level reference ending in a call is treated as a variable.
+ if TmpStr.startswith(PredVarStr):
+ IsFuncCall = True
+
+ if PredVarStr.strip() in IgnoredKeywordList:
+ continue
+ StarList = []
+ PredVarList = GetCNameList(PredVarStr, StarList)
+ # No variable found; the constant may come first, as in (0 == VarName).
+ if len(PredVarList) == 0:
+ continue
+
+ if SearchInCache:
+ Type = FuncReturnTypeDict.get(PredVarStr)
+ if Type is not None:
+ if Type.find('BOOLEAN') != -1:
+ PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+ continue
+
+ if PredVarStr in FuncReturnTypeDict:
+ continue
+
+ Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
+ if SearchInCache:
+ FuncReturnTypeDict[PredVarStr] = Type
+ if Type is None:
+ continue
+ if Type.find('BOOLEAN') != -1:
+ PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+
+
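+# Usage sketch (illustrative; the path is hypothetical and the file must have
+# been parsed into the DB first, e.g. via CollectSourceCodeDataIntoDB):
+#   CheckBooleanValueComparison(r'C:\ws\Pkg\Driver.c')
+# flags predicates such as 'if (Flag == TRUE)' where Flag resolves to BOOLEAN,
+# mirroring the pointer/NULL and non-BOOLEAN checks above.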
+def CheckHeaderFileData(FullFileName, AllTypedefFun=None):
+ # default to None so callers do not share one mutable default list
+ if AllTypedefFun is None:
+ AllTypedefFun = []
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select ID, Modifier
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ if not Result[1].startswith('extern'):
+ for Item in AllTypedefFun:
+ if '(%s)' % Result[1] in Item:
+ break
+ else:
+ PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_DATA, 'Variable definition appears in header file', FileTable, Result[0])
+
+ SqlStatement = """ select ID
+ from Function
+ where BelongsToFile = %d
+ """ % FileID
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_DATA, 'Function definition appears in header file', 'Function', Result[0])
+
+ return ErrorMsgList
+
+def CheckHeaderFileIfndef(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, StartLine
+ from %s
+ where Model = %d order by StartLine
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1, '', 'File', FileID)
+ return ErrorMsgList
+ for Result in ResultSet:
+ SqlStatement = """ select Value, EndLine
+ from %s
+ where EndLine < %d
+ """ % (FileTable, Result[1])
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ if not Result[0].startswith('/*') and not Result[0].startswith('//'):
+ PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2, '', 'File', FileID)
+ break
+
+ SqlStatement = """ select Value
+ from %s
+ where StartLine > (select max(EndLine) from %s where Model = %d)
+ """ % (FileTable, FileTable, DataClass.MODEL_IDENTIFIER_MACRO_ENDIF)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ if not Result[0].startswith('/*') and not Result[0].startswith('//'):
+ PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3, '', 'File', FileID)
+ return ErrorMsgList
+
+def CheckDoxygenCommand(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, ID
+ from %s
+ where Model = %d or Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ DoxygenCommandList = ['bug', 'todo', 'example', 'file', 'attention', 'param', 'post', 'pre', 'retval',
+ 'return', 'sa', 'since', 'test', 'note', 'par', 'endcode', 'code']
+ for Result in ResultSet:
+ CommentStr = Result[0]
+ CommentPartList = CommentStr.split()
+ for Part in CommentPartList:
+ if Part.upper() == 'BUGBUG':
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Bug should be marked with doxygen tag @bug', FileTable, Result[1])
+ if Part.upper() == 'TODO':
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'ToDo should be marked with doxygen tag @todo', FileTable, Result[1])
+ if Part.startswith('@'):
+ if EccGlobalData.gException.IsException(ERROR_DOXYGEN_CHECK_COMMAND, Part):
+ continue
+ if not Part.replace('@', '').strip():
+ continue
+ if Part.lstrip('@') in ['{', '}']:
+ continue
+ if Part.lstrip('@').isalpha():
+ if Part.lstrip('@') not in DoxygenCommandList:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, Result[1])
+ else:
+ Index = Part.find('[')
+ if Index == -1:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, Result[1])
+ # without a '[' the command cannot be sliced below; skip it
+ continue
+ RealCmd = Part[1:Index]
+ if RealCmd not in DoxygenCommandList:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, Result[1])
+
+
+def CheckDoxygenTripleForwardSlash(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+
+ SqlStatement = """ select ID, BodyStartLine, BodyStartColumn, EndLine, EndColumn
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ return
+
+ FuncDefSet = []
+ for Result in ResultSet:
+ FuncDefSet.append(Result)
+
+
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, ID, StartLine, StartColumn, EndLine, EndColumn
+ from %s
+ where Model = %d
+
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ CommentSet = []
+ try:
+ for Result in ResultSet:
+ CommentSet.append(Result)
+ except Exception:
+ print('Unrecognized chars in comment of file %s' % FullFileName)
+
+
+ for Result in CommentSet:
+ CommentStr = Result[0]
+ StartLine = Result[2]
+ StartColumn = Result[3]
+ EndLine = Result[4]
+ EndColumn = Result[5]
+ if not CommentStr.startswith('///<'):
+ continue
+
+ Found = False
+ for FuncDef in FuncDefSet:
+ if StartLine == FuncDef[1] and StartColumn > FuncDef[2] and EndLine == FuncDef[3] and EndColumn < FuncDef[4]:
+ Found = True
+ break
+ if StartLine > FuncDef[1] and EndLine < FuncDef[3]:
+ Found = True
+ break
+ if StartLine == FuncDef[1] and StartColumn > FuncDef[2] and EndLine < FuncDef[3]:
+ Found = True
+ break
+ if StartLine > FuncDef[1] and EndLine == FuncDef[3] and EndColumn < FuncDef[4]:
+ Found = True
+ break
+ if Found:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_FORMAT, '', FileTable, Result[1])
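+# Illustrative note (added commentary, not part of the original tool): a '///<'
+# trailing comment is reported only when its span falls inside a function
+# definition's body, e.g.
+#   Status = Foo (); ///< description   <- flagged inside a function body
+# The same comment at file scope (e.g. on a struct member) is not reported here.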
+
+
+def CheckFileHeaderDoxygenComments(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, ID
+ from %s
+ where Model = %d and (StartLine = 1 or StartLine = 7 or StartLine = 8) and StartColumn = 0
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ if len(ResultSet) == 0:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No file license header appears at the very beginning of the file.', 'File', FileID)
+ return ErrorMsgList
+
+ NoHeaderCommentStartFlag = True
+ NoHeaderCommentEndFlag = True
+ NoHeaderCommentPeriodFlag = True
+ NoCopyrightFlag = True
+ NoLicenseFlag = True
+ NoRevReferFlag = True
+ NextLineIndex = 0
+ for Result in ResultSet:
+ FileStartFlag = False
+ CommentStrList = []
+ CommentStr = Result[0].strip()
+ CommentStrListTemp = CommentStr.split('\n')
+ if (len(CommentStrListTemp) <= 1):
+ # For Mac classic line endings (CR only)
+ CommentStrListTemp = CommentStr.split('\r')
+ # Skip the content before the file header
+ for CommentLine in CommentStrListTemp:
+ if CommentLine.strip().startswith('/** @file'):
+ FileStartFlag = True
+ if FileStartFlag == True:
+ CommentStrList.append(CommentLine)
+
+ ID = Result[1]
+ Index = 0
+ if CommentStrList and CommentStrList[0].strip().startswith('/** @file'):
+ NoHeaderCommentStartFlag = False
+ else:
+ continue
+ if CommentStrList and CommentStrList[-1].strip().endswith('**/'):
+ NoHeaderCommentEndFlag = False
+ else:
+ continue
+
+ for CommentLine in CommentStrList:
+ Index = Index + 1
+ NextLineIndex = Index
+ if CommentLine.startswith('/** @file'):
+ continue
+ if CommentLine.startswith('**/'):
+ break
+ # Check whether the C file header comment content starts with two spaces.
+ if EccGlobalData.gConfig.HeaderCheckCFileCommentStartSpacesNum == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ if not CommentLine.startswith('/** @file') and not CommentLine.startswith('**/') and CommentLine.strip() and not CommentLine.startswith(' '):
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment content should start with two spaces at each line', FileTable, ID)
+
+ CommentLine = CommentLine.strip()
+ if CommentLine.startswith('Copyright') or ('Copyright' in CommentLine and CommentLine.lower().startswith('(c)')):
+ NoCopyrightFlag = False
+ if CommentLine.find('All rights reserved') == -1:
+ for Copyright in EccGlobalData.gConfig.Copyright:
+ if CommentLine.find(Copyright) > -1:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, '"All rights reserved" announcement should follow the "Copyright" on the same line', FileTable, ID)
+ break
+ if not CommentLine.endswith('<BR>'):
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'The "<BR>" at the end of the Copyright line is required', FileTable, ID)
+ if NextLineIndex < len(CommentStrList) and CommentStrList[NextLineIndex].strip().startswith('Copyright') == False and CommentStrList[NextLineIndex].strip():
+ NoLicenseFlag = False
+ if CommentLine.startswith('@par Revision Reference:'):
+ NoRevReferFlag = False
+ RefListFlag = False
+ for RefLine in CommentStrList[NextLineIndex:]:
+ if RefLine.strip() and (NextLineIndex + 1) < len(CommentStrList) and CommentStrList[NextLineIndex+1].strip() and CommentStrList[NextLineIndex+1].strip().startswith('**/') == False:
+ RefListFlag = True
+ if not RefLine.strip() or RefLine.strip().startswith('**/'):
+ RefListFlag = False
+ break
+ # Check whether each reference in the C file header comment's list begins with a bullet character.
+ if EccGlobalData.gConfig.HeaderCheckCFileCommentReferenceFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ if RefListFlag == True:
+ if RefLine.strip() and not RefLine.strip().startswith('**/') and not RefLine.startswith(' -'):
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character "-"', FileTable, ID)
+
+ if NoHeaderCommentStartFlag:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with "/** @file"', FileTable, ID)
+ return
+ if NoHeaderCommentEndFlag:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with "**/"', FileTable, ID)
+ return
+ if NoCopyrightFlag:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment missing the "Copyright"', FileTable, ID)
+ # Check whether the C file header comment has the License immediately after the "Copyright" line.
+ if EccGlobalData.gConfig.HeaderCheckCFileCommentLicenseFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ if NoLicenseFlag:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should have the License immediately after the "Copyright" line', FileTable, ID)
+
+def CheckFuncHeaderDoxygenComments(FullFileName):
+ ErrorMsgList = []
+
+ FileID = GetTableID(FullFileName, ErrorMsgList)
+ if FileID < 0:
+ return ErrorMsgList
+
+ Db = GetDB()
+ FileTable = 'Identifier' + str(FileID)
+ SqlStatement = """ select Value, StartLine, EndLine, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
+
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ CommentSet = []
+ try:
+ for Result in ResultSet:
+ CommentSet.append(Result)
+ except:
+ print('Unrecognized chars in comment of file %s' % FullFileName)
+
+ # Func Decl check
+ SqlStatement = """ select Modifier, Name, StartLine, ID, Value
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ FuncName = Result[4]
+ FunctionHeaderComment = CheckCommentImmediatelyPrecedeFunctionHeader(Result[1], Result[2], CommentSet)
+ if FunctionHeaderComment:
+ CheckFunctionHeaderConsistentWithDoxygenComment(Result[0], Result[1], Result[2], FunctionHeaderComment[0], FunctionHeaderComment[1], ErrorMsgList, FunctionHeaderComment[3], FileTable)
+ else:
+ if EccGlobalData.gException.IsException(ERROR_HEADER_CHECK_FUNCTION, FuncName):
+ continue
+ ErrorMsgList.append('Line %d : Function [%s] has NO comment immediately preceding it.' % (Result[2], Result[1]))
+ PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'Function [%s] has NO comment immediately preceding it.' % (FuncName), FileTable, Result[3])
+
+ # Func Def check
+ SqlStatement = """ select Value, StartLine, EndLine, ID
+ from %s
+ where Model = %d
+ """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)
+
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ CommentSet = []
+ try:
+ for Result in ResultSet:
+ CommentSet.append(Result)
+ except:
+ print('Unrecognized chars in comment of file %s' % FullFileName)
+
+ SqlStatement = """ select Modifier, Header, StartLine, ID, Name
+ from Function
+ where BelongsToFile = %d
+ """ % (FileID)
+ ResultSet = Db.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ FuncName = Result[4]
+ FunctionHeaderComment = CheckCommentImmediatelyPrecedeFunctionHeader(Result[1], Result[2], CommentSet)
+ if FunctionHeaderComment:
+ CheckFunctionHeaderConsistentWithDoxygenComment(Result[0], Result[1], Result[2], FunctionHeaderComment[0], FunctionHeaderComment[1], ErrorMsgList, FunctionHeaderComment[3], FileTable)
+ else:
+ if EccGlobalData.gException.IsException(ERROR_HEADER_CHECK_FUNCTION, FuncName):
+ continue
+ ErrorMsgList.append('Line %d : Function [%s] has NO comment immediately preceding it.' % (Result[2], Result[1]))
+ PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'Function [%s] has NO comment immediately preceding it.' % (FuncName), 'Function', Result[3])
+ return ErrorMsgList
+
+def CheckCommentImmediatelyPrecedeFunctionHeader(FuncName, FuncStartLine, CommentSet):
+
+ for Comment in CommentSet:
+ if Comment[2] == FuncStartLine - 1:
+ return Comment
+ return None
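+# Illustrative note (added commentary): a comment qualifies as "immediately
+# preceding" only when its end line (Comment[2]) is exactly one line above the
+# function's start line; e.g. a comment ending on line 99 matches a function
+# starting on line 100, while any blank line in between disqualifies it.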
+
+def GetDoxygenStrFromComment(Str):
+ DoxygenStrList = []
+ ParamTagList = Str.split('@param')
+ if len(ParamTagList) > 1:
+ i = 1
+ while i < len(ParamTagList):
+ DoxygenStrList.append('@param' + ParamTagList[i])
+ i += 1
+
+ Str = ParamTagList[0]
+
+ RetvalTagList = ParamTagList[-1].split('@retval')
+ if len(RetvalTagList) > 1:
+ if len(ParamTagList) > 1:
+ DoxygenStrList[-1] = '@param' + RetvalTagList[0]
+ i = 1
+ while i < len(RetvalTagList):
+ DoxygenStrList.append('@retval' + RetvalTagList[i])
+ i += 1
+
+ ReturnTagList = RetvalTagList[-1].split('@return')
+ if len(ReturnTagList) > 1:
+ if len(RetvalTagList) > 1:
+ DoxygenStrList[-1] = '@retval' + ReturnTagList[0]
+ elif len(ParamTagList) > 1:
+ DoxygenStrList[-1] = '@param' + ReturnTagList[0]
+ i = 1
+ while i < len(ReturnTagList):
+ DoxygenStrList.append('@return' + ReturnTagList[i])
+ i += 1
+
+ if len(DoxygenStrList) > 0:
+ DoxygenStrList[-1] = DoxygenStrList[-1].rstrip('--*/')
+
+ return DoxygenStrList
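+# Worked example (added commentary, not from the original tool):
+#   GetDoxygenStrFromComment('/** Brief. @param A desc @retval EFI_SUCCESS ok **/')
+# returns ['@param A desc ', '@retval EFI_SUCCESS ok ']: the text is re-split in
+# @param/@retval/@return order and the trailing '**/' (or '--*/') terminator is
+# stripped from the last tag.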
+
+def CheckGeneralDoxygenCommentLayout(Str, StartLine, ErrorMsgList, CommentId= -1, TableName=''):
+ # Check for the '/**' prefix and '**/' suffix, and that @retval does not appear before @param.
+ if not Str.startswith('/**'):
+ ErrorMsgList.append('Line %d : Comment does NOT have prefix /** ' % StartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Comment does NOT have prefix /** ', TableName, CommentId)
+ if not Str.endswith('**/'):
+ ErrorMsgList.append('Line %d : Comment does NOT have tail **/ ' % StartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Comment does NOT have tail **/ ', TableName, CommentId)
+ FirstRetvalIndex = Str.find('@retval')
+ LastParamIndex = Str.rfind('@param')
+ if (FirstRetvalIndex > 0) and (LastParamIndex > 0) and (FirstRetvalIndex < LastParamIndex):
+ ErrorMsgList.append('Line %d : @retval appear before @param ' % StartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, @retval appear before @param ', TableName, CommentId)
+
+def CheckFunctionHeaderConsistentWithDoxygenComment(FuncModifier, FuncHeader, FuncStartLine, CommentStr, CommentStartLine, ErrorMsgList, CommentId= -1, TableName=''):
+
+ ParamList = GetParamList(FuncHeader)
+ CheckGeneralDoxygenCommentLayout(CommentStr, CommentStartLine, ErrorMsgList, CommentId, TableName)
+ DescriptionStr = CommentStr
+ DoxygenStrList = GetDoxygenStrFromComment(DescriptionStr)
+ if DescriptionStr.find('.') == -1:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', TableName, CommentId)
+ DoxygenTagNumber = len(DoxygenStrList)
+ ParamNumber = len(ParamList)
+ for Param in ParamList:
+ if Param.Name.upper() == 'VOID' and ParamNumber == 1:
+ ParamNumber -= 1
+ Index = 0
+ if ParamNumber > 0 and DoxygenTagNumber > 0:
+ while Index < ParamNumber and Index < DoxygenTagNumber:
+ ParamModifier = ParamList[Index].Modifier
+ ParamName = ParamList[Index].Name.strip()
+ Tag = DoxygenStrList[Index].strip(' ')
+ if Tag[-1] not in ('\n', '\r'):
+ ErrorMsgList.append('Line %d : in Comment, <%s> does NOT end with new line ' % (CommentStartLine, Tag.replace('\n', '').replace('\r', '')))
+ PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'in Comment, <%s> does NOT end with new line ' % (Tag.replace('\n', '').replace('\r', '')), TableName, CommentId)
+ TagPartList = Tag.split()
+ if len(TagPartList) < 2:
+ ErrorMsgList.append('Line %d : in Comment, <%s> does NOT contain doxygen contents ' % (CommentStartLine, Tag.replace('\n', '').replace('\r', '')))
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> does NOT contain doxygen contents ' % (Tag.replace('\n', '').replace('\r', '')), TableName, CommentId)
+ Index += 1
+ continue
+ LBPos = Tag.find('[')
+ RBPos = Tag.find(']')
+ ParamToLBContent = Tag[len('@param'):LBPos].strip()
+ if LBPos > 0 and len(ParamToLBContent) == 0 and RBPos > LBPos:
+ InOutStr = ''
+ ModifierPartList = ParamModifier.split()
+ for Part in ModifierPartList:
+ if Part.strip() == 'IN':
+ InOutStr += 'in'
+ if Part.strip() == 'OUT':
+ if InOutStr != '':
+ InOutStr += ', out'
+ else:
+ InOutStr = 'out'
+
+ if InOutStr != '':
+ if Tag.find('[' + InOutStr + ']') == -1:
+ if InOutStr != 'in, out':
+ ErrorMsgList.append('Line %d : in Comment, <%s> does NOT have %s ' % (CommentStartLine, (TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'))
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> does NOT have %s ' % ((TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'), TableName, CommentId)
+ else:
+ if Tag.find('[in,out]') == -1:
+ ErrorMsgList.append('Line %d : in Comment, <%s> does NOT have %s ' % (CommentStartLine, (TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'))
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> does NOT have %s ' % ((TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'), TableName, CommentId)
+
+
+ if Tag.find(ParamName) == -1 and ParamName != 'VOID' and ParamName != 'void':
+ ErrorMsgList.append('Line %d : in Comment, <%s> is NOT consistent with parameter name %s ' % (CommentStartLine, (TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), ParamName))
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> is NOT consistent with parameter name %s ' % ((TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), ParamName), TableName, CommentId)
+ Index += 1
+
+ if Index < ParamNumber:
+ ErrorMsgList.append('Line %d : Number of doxygen tags in comment is less than the number of function parameters' % CommentStartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Number of doxygen tags in comment is less than the number of function parameters ', TableName, CommentId)
+ # VOID return type, NOT VOID*. VOID* should be matched with a doxygen tag.
+ if (FuncModifier.find('VOID') != -1 or FuncModifier.find('void') != -1) and FuncModifier.find('*') == -1:
+
+ # Assume a return description tag is allowed for a VOID function's return; that is why 'DoxygenTagNumber - 1' is used instead of 'DoxygenTagNumber'.
+ if Index < DoxygenTagNumber - 1 or (Index < DoxygenTagNumber and DoxygenStrList[Index].startswith('@retval')):
+ ErrorMsgList.append('Line %d : VOID return type needs NO doxygen tags in comment' % CommentStartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'VOID return type needs no doxygen tags in comment ', TableName, CommentId)
+ else:
+ if Index < DoxygenTagNumber and not DoxygenStrList[Index].startswith('@retval') and not DoxygenStrList[Index].startswith('@return'):
+ ErrorMsgList.append('Line %d : Number of @param doxygen tags in comment does NOT match number of function parameters' % CommentStartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Number of @param doxygen tags in comment does NOT match number of function parameters ', TableName, CommentId)
+ else:
+ if ParamNumber == 0 and DoxygenTagNumber != 0 and ((FuncModifier.find('VOID') != -1 or FuncModifier.find('void') != -1) and FuncModifier.find('*') == -1):
+ ErrorMsgList.append('Line %d : VOID return type needs NO doxygen tags in comment' % CommentStartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'VOID return type needs NO doxygen tags in comment ', TableName, CommentId)
+ if ParamNumber != 0 and DoxygenTagNumber == 0:
+ ErrorMsgList.append('Line %d : No doxygen tags in comment' % CommentStartLine)
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'No doxygen tags in comment ', TableName, CommentId)
+
+if __name__ == '__main__':
+
+# EdkLogger.Initialize()
+# EdkLogger.SetLevel(EdkLogger.QUIET)
+# CollectSourceCodeDataIntoDB(sys.argv[1])
+ try:
+ test_file = sys.argv[1]
+ except IndexError:
+ print("Usage: %s filename" % sys.argv[0])
+ sys.exit(1)
+ MsgList = CheckFuncHeaderDoxygenComments(test_file)
+ for Msg in MsgList:
+ print(Msg)
+ print('Done!')
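+# Usage sketch (added commentary): run this module directly on one source file,
+# e.g. 'python c.py SomeFile.c' (names illustrative); it prints any
+# 'has NO comment immediately preceding it' findings, followed by 'Done!'.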
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/config.ini b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/config.ini
new file mode 100644
index 00000000..d588a42d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/config.ini
@@ -0,0 +1,286 @@
+## @file
+# This file is used to set configuration of ECC tool
+# For the items listed below, 1 means valid, 0 means invalid
+#
+# Copyright (c) 2007 - 2015, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+#
+# Identify the version of current configuration
+#
+Version = 0.1
+
+#
+# Identify whether to check all items
+# 1 - Check all items and ignore all other detailed items
+# 0 - Do not check all items; the tool will go through the other detailed items to decide what to check
+#
+CheckAll = 0
+
+#
+# Identify whether to automatically correct mistakes
+# 1 - Automatically correct
+# 0 - Do not automatically correct
+# Only the following check points can be automatically corrected; others not listed below are not supported even if this is set to 1
+#
+# GeneralCheckTab
+# GeneralCheckIndentation
+# GeneralCheckLine
+# GeneralCheckCarriageReturn
+# SpaceCheckAll
+#
+AutoCorrect = 1
+
+#
+# List customized modifiers here, separated by ','
+#
+ModifierList = IN, OUT, OPTIONAL, UNALIGNED, EFI_RUNTIMESERVICE, EFI_BOOTSERVICE, EFIAPI, TPMINTERNALAPI, STATIC
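+#
+# Example (illustrative, not part of the original configuration): a project that
+# defines its own calling-convention decorator could extend the list, e.g.
+# ModifierList = IN, OUT, OPTIONAL, UNALIGNED, EFI_RUNTIMESERVICE, EFI_BOOTSERVICE, EFIAPI, TPMINTERNALAPI, STATIC, MY_DECORATOR
+#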
+
+#
+# General Checking
+#
+GeneralCheckAll = 0
+
+# Check that no tab is used; tabs should be replaced with spaces
+GeneralCheckNoTab = 1
+# The width of Tab
+GeneralCheckTabWidth = 2
+# Check whether the indentation follows the coding style
+GeneralCheckIndentation = 1
+# The width of indentation
+GeneralCheckIndentationWidth = 2
+# Check whether no line exceeds the defined width
+GeneralCheckLine = 1
+# The width of a line
+GeneralCheckLineWidth = 120
+# Check that _asm is not used in the source file
+GeneralCheckNo_Asm = 1
+# Check that "#pragma" is not used in the source file except "#pragma pack(#)".
+GeneralCheckNoProgma = 1
+# Check whether there is a carriage return at the end of the file
+GeneralCheckCarriageReturn = 1
+# Check whether the file exists
+GeneralCheckFileExistence = 1
+# Check whether the file has non-ASCII characters
+GeneralCheckNonAcsii = 1
+# Check whether UNI file is valid
+GeneralCheckUni = 1
+# Check that only CRLF (Carriage Return Line Feed) line endings are used.
+GeneralCheckLineEnding = 1
+# Check if there is no trailing white space in one line.
+GeneralCheckTrailingWhiteSpaceLine = 1
+
+#
+# Space Checking
+#
+SpaceCheckAll = 1
+
+#
+# Predicate Expression Checking
+#
+PredicateExpressionCheckAll = 0
+
+# Check that Boolean values (variable type BOOLEAN) do not use explicit comparisons to TRUE or FALSE
+PredicateExpressionCheckBooleanValue = 1
+# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, <, >=, <=).
+PredicateExpressionCheckNonBooleanOperator = 1
+# Check whether a comparison of any pointer to zero must be done via the NULL type
+PredicateExpressionCheckComparisonNullType = 1
+
+#
+# Headers Checking
+#
+HeaderCheckAll = 0
+
+# Check whether File header exists
+HeaderCheckFile = 1
+# Check whether Function header exists
+HeaderCheckFunction = 1
+# Check whether the meta data file header comment ends with '##'
+HeaderCheckFileCommentEnd = 0
+# Check whether the C file header comment content starts with two spaces
+HeaderCheckCFileCommentStartSpacesNum = 0
+# Check whether each reference in the C file header comment's list begins with a bullet character '-'
+HeaderCheckCFileCommentReferenceFormat = 0
+# Check whether the C file header comment has the License immediately after the "Copyright" line
+HeaderCheckCFileCommentLicenseFormat = 0
+
+#
+# C Function Layout Checking
+#
+CFunctionLayoutCheckAll = 0
+
+# Check whether the return type exists and is on the first line
+CFunctionLayoutCheckReturnType = 1
+# Check whether any optional functional modifiers exist next to the return type
+CFunctionLayoutCheckOptionalFunctionalModifier = 1
+# Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
+# Check whether the closing parenthesis is on its own line and also indented two spaces
+CFunctionLayoutCheckFunctionName = 1
+# Check whether the function prototypes in include files have the same form as function definitions
+CFunctionLayoutCheckFunctionPrototype = 1
+# Check whether the body of a function is contained by open and close braces that must be in the first column
+CFunctionLayoutCheckFunctionBody = 1
+# Check whether the data declarations are the first code in a module.
+CFunctionLayoutCheckDataDeclaration = 1
+# Check that variables are not initialized as part of their declaration
+CFunctionLayoutCheckNoInitOfVariable = 1
+# Check that STATIC is not used for functions
+CFunctionLayoutCheckNoStatic = 1
+
+#
+# Include Files Checking
+#
+IncludeFileCheckAll = 0
+
+# Check whether there are include files with the same name
+IncludeFileCheckSameName = 1
+# Check whether all include file contents are guarded by a #ifndef statement.
+# the #ifndef must be the first line of code following the file header comment
+# the #endif must appear on the last line in the file
+IncludeFileCheckIfndefStatement = 1
+# Check whether include files contain only public or only private data
+# Check that include files do NOT contain code or define data variables
+IncludeFileCheckData = 1
+
+#
+# Declarations and Data Types Checking
+#
+DeclarationDataTypeCheckAll = 0
+
+# Check that int, unsigned, char, void, static and long are not used in any .c, .h or .asl files.
+DeclarationDataTypeCheckNoUseCType = 1
+# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
+DeclarationDataTypeCheckInOutModifier = 1
+# Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
+DeclarationDataTypeCheckEFIAPIModifier = 1
+# Check whether Enumerated Type has a 'typedef' and the name is capital
+DeclarationDataTypeCheckEnumeratedType = 1
+# Check whether Structure Type has a 'typedef' and the name is capital
+DeclarationDataTypeCheckStructureDeclaration = 1
+# Check whether having same Structure
+DeclarationDataTypeCheckSameStructure = 1
+# Check whether Union Type has a 'typedef' and the name is capital
+DeclarationDataTypeCheckUnionType = 1
+
+
+#
+# Naming Conventions Checking
+#
+NamingConventionCheckAll = 0
+
+# Check whether only capital letters are used for #define declarations
+NamingConventionCheckDefineStatement = 1
+# Check whether only capital letters are used for typedef declarations
+NamingConventionCheckTypedefStatement = 1
+# Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
+NamingConventionCheckIfndefStatement = 1
+# Rules for path names, variable names and function names:
+# 1. The first character should be upper case
+# 2. The word must contain lower-case characters
+# 3. No spaces are allowed
+# 4. Global variable names must start with a 'g'
+# Check whether the path name followed the rule
+NamingConventionCheckPathName = 1
+# Check whether the variable name followed the rule
+NamingConventionCheckVariableName = 1
+# Check whether the function name followed the rule
+NamingConventionCheckFunctionName = 1
+# Check that short, single-character variable names are not used
+NamingConventionCheckSingleCharacterVariable = 1
+
+#
+# Doxygen Checking
+#
+DoxygenCheckAll = 0
+
+# Check whether the file headers follow the Doxygen special documentation blocks in section 2.3.5
+DoxygenCheckFileHeader = 1
+# Check whether the function headers follow the Doxygen special documentation blocks in section 2.3.5
+DoxygenCheckFunctionHeader = 1
+# Check whether the first line of text in a comment block is a brief description of the element being documented.
+# The brief description must end with a period.
+DoxygenCheckCommentDescription = 1
+# Check that comment lines in the '///< ... text ...' format, if used, appear after the code section.
+DoxygenCheckCommentFormat = 1
+# Check that the only Doxygen commands allowed to mark the code are @bug and @todo.
+DoxygenCheckCommand = 1
+
+#
+# Meta-Data File Processing Checking
+#
+MetaDataFileCheckAll = 0
+
+# Check whether each file defined in meta-data exists
+MetaDataFileCheckPathName = 1
+# Generate a list for all files defined in meta-data files
+MetaDataFileCheckGenerateFileList = 1
+# The path of log file
+MetaDataFileCheckPathOfGenerateFileList = File.log
+# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+# Each Library Instance must specify the Supported Module Types in its INF file,
+# and any module specifying the library instance must be one of the supported types.
+MetaDataFileCheckLibraryInstance = 1
+# Check whether a Library Instance has been defined for all dependent library classes
+MetaDataFileCheckLibraryInstanceDependent = 1
+# Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
+MetaDataFileCheckLibraryInstanceOrder = 1
+# Check for unnecessary inclusion of library classes in the INF file
+MetaDataFileCheckLibraryNoUse = 1
+# Check whether the header files in the Include\Library directory are defined in the package DEC file.
+MetaDataFileCheckLibraryDefinedInDec = 1
+# Check that when an INF file is specified in the FDF file but not in the DSC file, the INF file is for a binary module only
+MetaDataFileCheckBinaryInfInFdf = 1
+# Do not report errors and warnings related to OS include files such as "windows.h" and "stdio.h".
+# Check whether a PCD is set in a DSC file or the FDF file, but not in both.
+MetaDataFileCheckPcdDuplicate = 1
+# Check whether PCD settings in the FDF file can only be related to flash.
+MetaDataFileCheckPcdFlash = 1
+# Check for PCDs used in INF files but not specified in DSC or FDF files
+MetaDataFileCheckPcdNoUse = 0
+# Check whether duplicate GUIDs are defined for Guid/Protocol/Ppi
+MetaDataFileCheckGuidDuplicate = 1
+# Check whether all files under module directory are described in INF files
+MetaDataFileCheckModuleFileNoUse = 1
+# Check whether the PCD is correctly used in C function via its type
+MetaDataFileCheckPcdType = 1
+# Check whether there is FILE_GUID duplication among different INF files
+MetaDataFileCheckModuleFileGuidDuplication = 1
+
+#
+# Uni File Processing Checking
+#
+UniCheckAll = 0
+# Check whether the INF or DEC file defines the localized information in the associated UNI file.
+UniCheckHelpInfo = 1
+# Check whether the PCD defines the prompt and help in the DEC file and the localized information in the associated UNI file.
+UniCheckPCDInfo = 1
+# Do not check whether the UNI file is in UTF-16 format
+GeneralCheckUni = -1
+
+#
+# SMM Communicate Function Parameter Checking
+#
+SmmCommParaCheckAll = 0
+# Check if the EFI_SMM_COMMUNICATION_PROTOCOL parameter buffer type is Reserved / ACPI NVS or UEFI RT code/data
+SmmCommParaCheckBufferType = 1
+
+#
+# The check points in this section are reserved
+#
+# GotoStatementCheckAll = 0
+# SpellingCheckAll = 0
+#
+
+# A list of binary file extension names
+BinaryExtList = EXE, EFI, FV, ROM, DLL, COM, BMP, GIF, PYD, CMP, BIN, JPG, UNI, RAW, COM2, LIB, DEPEX, SYS, DB
+# A list of directories to scan exclusively; the dirs should be the top folder(s) under the workspace
+ScanOnlyDirList = ScanFolder1 ScanFolder2
+# A list used to circumvent special strings
+TokenReleaceList = L'', L'\"', L"\"", L''', L""", L"\"\"", L"\"^", L" \"", L"\" \""
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/exception.xml b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/exception.xml
new file mode 100644
index 00000000..2bd25a96
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Ecc/exception.xml
@@ -0,0 +1,319 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ExceptionList xmlns="http://www.uefi.org/2008/2.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <Copyright>Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.</Copyright>
+ <License>
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+ </License>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_break</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>InternalX86DisablePaging32</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>InternalX86EnablePaging32</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>InternalLongJump</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>SetJump</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoRead8</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoWrite8</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoRead16</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoWrite16</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoRead32</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoWrite32</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@R1</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@R2</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@Rx</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@R2.</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_DriverUnloadHandler</KeyWord>
+ <ErrorID>8006</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ModuleEntryPoint</KeyWord>
+ <ErrorID>8006</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>ASSERT</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>REPORT_STATUS_CODE</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>REPORT_STATUS_CODE_WITH_EXTENDED_DATA</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>REPORT_STATUS_CODE_WITH_DEVICE_PATH</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+</ExceptionList>
\ No newline at end of file
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CLexer.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CLexer.py
new file mode 100755
index 00000000..f57d8e1e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CLexer.py
@@ -0,0 +1,4941 @@
+# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
+
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+## @file
+# The file defines the Lexer for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+T114=114
+T115=115
+T116=116
+T117=117
+FloatTypeSuffix=16
+LETTER=11
+T29=29
+T28=28
+T27=27
+T26=26
+T25=25
+EOF=-1
+STRING_LITERAL=9
+FLOATING_POINT_LITERAL=10
+T38=38
+T37=37
+T39=39
+T34=34
+COMMENT=22
+T33=33
+T36=36
+T35=35
+T30=30
+T32=32
+T31=31
+LINE_COMMENT=23
+IntegerTypeSuffix=14
+CHARACTER_LITERAL=8
+T49=49
+T48=48
+T100=100
+T43=43
+T42=42
+T102=102
+T41=41
+T101=101
+T40=40
+T47=47
+T46=46
+T45=45
+T44=44
+T109=109
+T107=107
+T108=108
+T105=105
+WS=19
+T106=106
+T103=103
+T104=104
+T50=50
+LINE_COMMAND=24
+T59=59
+T113=113
+T52=52
+T112=112
+T51=51
+T111=111
+T54=54
+T110=110
+EscapeSequence=12
+DECIMAL_LITERAL=7
+T53=53
+T56=56
+T55=55
+T58=58
+T57=57
+T75=75
+T76=76
+T73=73
+T74=74
+T79=79
+T77=77
+T78=78
+Exponent=15
+HexDigit=13
+T72=72
+T71=71
+T70=70
+T62=62
+T63=63
+T64=64
+T65=65
+T66=66
+T67=67
+T68=68
+T69=69
+IDENTIFIER=4
+UnicodeVocabulary=21
+HEX_LITERAL=5
+T61=61
+T60=60
+T99=99
+T97=97
+BS=20
+T98=98
+T95=95
+T96=96
+OCTAL_LITERAL=6
+T94=94
+Tokens=118
+T93=93
+T92=92
+T91=91
+T90=90
+T88=88
+T89=89
+T84=84
+T85=85
+T86=86
+T87=87
+UnicodeEscape=18
+T81=81
+T80=80
+T83=83
+OctalEscape=17
+T82=82
+
+class CLexer(Lexer):
+
+ grammarFileName = "C.g"
+
+ def __init__(self, input=None):
+ Lexer.__init__(self, input)
+ self.dfa25 = self.DFA25(
+ self, 25,
+ eot = self.DFA25_eot,
+ eof = self.DFA25_eof,
+ min = self.DFA25_min,
+ max = self.DFA25_max,
+ accept = self.DFA25_accept,
+ special = self.DFA25_special,
+ transition = self.DFA25_transition
+ )
+ self.dfa35 = self.DFA35(
+ self, 35,
+ eot = self.DFA35_eot,
+ eof = self.DFA35_eof,
+ min = self.DFA35_min,
+ max = self.DFA35_max,
+ accept = self.DFA35_accept,
+ special = self.DFA35_special,
+ transition = self.DFA35_transition
+ )
+
+
+
+
+
+
+ # $ANTLR start T25
+ def mT25(self, ):
+
+ try:
+ self.type = T25
+
+ # C.g:27:5: ( ';' )
+ # C.g:27:7: ';'
+ self.match(u';')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T25
+
+
+
+ # $ANTLR start T26
+ def mT26(self, ):
+
+ try:
+ self.type = T26
+
+ # C.g:28:5: ( 'typedef' )
+ # C.g:28:7: 'typedef'
+ self.match("typedef")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T26
+
+
+
+ # $ANTLR start T27
+ def mT27(self, ):
+
+ try:
+ self.type = T27
+
+ # C.g:29:5: ( ',' )
+ # C.g:29:7: ','
+ self.match(u',')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T27
+
+
+
+ # $ANTLR start T28
+ def mT28(self, ):
+
+ try:
+ self.type = T28
+
+ # C.g:30:5: ( '=' )
+ # C.g:30:7: '='
+ self.match(u'=')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T28
+
+
+
+ # $ANTLR start T29
+ def mT29(self, ):
+
+ try:
+ self.type = T29
+
+ # C.g:31:5: ( 'extern' )
+ # C.g:31:7: 'extern'
+ self.match("extern")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T29
+
+
+
+ # $ANTLR start T30
+ def mT30(self, ):
+
+ try:
+ self.type = T30
+
+ # C.g:32:5: ( 'static' )
+ # C.g:32:7: 'static'
+ self.match("static")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T30
+
+
+
+ # $ANTLR start T31
+ def mT31(self, ):
+
+ try:
+ self.type = T31
+
+ # C.g:33:5: ( 'auto' )
+ # C.g:33:7: 'auto'
+ self.match("auto")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T31
+
+
+
+ # $ANTLR start T32
+ def mT32(self, ):
+
+ try:
+ self.type = T32
+
+ # C.g:34:5: ( 'register' )
+ # C.g:34:7: 'register'
+ self.match("register")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T32
+
+
+
+ # $ANTLR start T33
+ def mT33(self, ):
+
+ try:
+ self.type = T33
+
+ # C.g:35:5: ( 'STATIC' )
+ # C.g:35:7: 'STATIC'
+ self.match("STATIC")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T33
+
+
+
+ # $ANTLR start T34
+ def mT34(self, ):
+
+ try:
+ self.type = T34
+
+ # C.g:36:5: ( 'void' )
+ # C.g:36:7: 'void'
+ self.match("void")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T34
+
+
+
+ # $ANTLR start T35
+ def mT35(self, ):
+
+ try:
+ self.type = T35
+
+ # C.g:37:5: ( 'char' )
+ # C.g:37:7: 'char'
+ self.match("char")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T35
+
+
+
+ # $ANTLR start T36
+ def mT36(self, ):
+
+ try:
+ self.type = T36
+
+ # C.g:38:5: ( 'short' )
+ # C.g:38:7: 'short'
+ self.match("short")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T36
+
+
+
+ # $ANTLR start T37
+ def mT37(self, ):
+
+ try:
+ self.type = T37
+
+ # C.g:39:5: ( 'int' )
+ # C.g:39:7: 'int'
+ self.match("int")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T37
+
+
+
+ # $ANTLR start T38
+ def mT38(self, ):
+
+ try:
+ self.type = T38
+
+ # C.g:40:5: ( 'long' )
+ # C.g:40:7: 'long'
+ self.match("long")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T38
+
+
+
+ # $ANTLR start T39
+ def mT39(self, ):
+
+ try:
+ self.type = T39
+
+ # C.g:41:5: ( 'float' )
+ # C.g:41:7: 'float'
+ self.match("float")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T39
+
+
+
+ # $ANTLR start T40
+ def mT40(self, ):
+
+ try:
+ self.type = T40
+
+ # C.g:42:5: ( 'double' )
+ # C.g:42:7: 'double'
+ self.match("double")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T40
+
+
+
+ # $ANTLR start T41
+ def mT41(self, ):
+
+ try:
+ self.type = T41
+
+ # C.g:43:5: ( 'signed' )
+ # C.g:43:7: 'signed'
+ self.match("signed")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T41
+
+
+
+ # $ANTLR start T42
+ def mT42(self, ):
+
+ try:
+ self.type = T42
+
+ # C.g:44:5: ( 'unsigned' )
+ # C.g:44:7: 'unsigned'
+ self.match("unsigned")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T42
+
+
+
+ # $ANTLR start T43
+ def mT43(self, ):
+
+ try:
+ self.type = T43
+
+ # C.g:45:5: ( '{' )
+ # C.g:45:7: '{'
+ self.match(u'{')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T43
+
+
+
+ # $ANTLR start T44
+ def mT44(self, ):
+
+ try:
+ self.type = T44
+
+ # C.g:46:5: ( '}' )
+ # C.g:46:7: '}'
+ self.match(u'}')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T44
+
+
+
+ # $ANTLR start T45
+ def mT45(self, ):
+
+ try:
+ self.type = T45
+
+ # C.g:47:5: ( 'struct' )
+ # C.g:47:7: 'struct'
+ self.match("struct")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T45
+
+
+
+ # $ANTLR start T46
+ def mT46(self, ):
+
+ try:
+ self.type = T46
+
+ # C.g:48:5: ( 'union' )
+ # C.g:48:7: 'union'
+ self.match("union")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T46
+
+
+
+ # $ANTLR start T47
+ def mT47(self, ):
+
+ try:
+ self.type = T47
+
+ # C.g:49:5: ( ':' )
+ # C.g:49:7: ':'
+ self.match(u':')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T47
+
+
+
+ # $ANTLR start T48
+ def mT48(self, ):
+
+ try:
+ self.type = T48
+
+ # C.g:50:5: ( 'enum' )
+ # C.g:50:7: 'enum'
+ self.match("enum")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T48
+
+
+
+ # $ANTLR start T49
+ def mT49(self, ):
+
+ try:
+ self.type = T49
+
+ # C.g:51:5: ( 'const' )
+ # C.g:51:7: 'const'
+ self.match("const")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T49
+
+
+
+ # $ANTLR start T50
+ def mT50(self, ):
+
+ try:
+ self.type = T50
+
+ # C.g:52:5: ( 'volatile' )
+ # C.g:52:7: 'volatile'
+ self.match("volatile")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T50
+
+
+
+ # $ANTLR start T51
+ def mT51(self, ):
+
+ try:
+ self.type = T51
+
+ # C.g:53:5: ( 'IN' )
+ # C.g:53:7: 'IN'
+ self.match("IN")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T51
+
+
+
+ # $ANTLR start T52
+ def mT52(self, ):
+
+ try:
+ self.type = T52
+
+ # C.g:54:5: ( 'OUT' )
+ # C.g:54:7: 'OUT'
+ self.match("OUT")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T52
+
+
+
+ # $ANTLR start T53
+ def mT53(self, ):
+
+ try:
+ self.type = T53
+
+ # C.g:55:5: ( 'OPTIONAL' )
+ # C.g:55:7: 'OPTIONAL'
+ self.match("OPTIONAL")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T53
+
+
+
+ # $ANTLR start T54
+ def mT54(self, ):
+
+ try:
+ self.type = T54
+
+ # C.g:56:5: ( 'CONST' )
+ # C.g:56:7: 'CONST'
+ self.match("CONST")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T54
+
+
+
+ # $ANTLR start T55
+ def mT55(self, ):
+
+ try:
+ self.type = T55
+
+ # C.g:57:5: ( 'UNALIGNED' )
+ # C.g:57:7: 'UNALIGNED'
+ self.match("UNALIGNED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T55
+
+
+
+ # $ANTLR start T56
+ def mT56(self, ):
+
+ try:
+ self.type = T56
+
+ # C.g:58:5: ( 'VOLATILE' )
+ # C.g:58:7: 'VOLATILE'
+ self.match("VOLATILE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T56
+
+
+
+ # $ANTLR start T57
+ def mT57(self, ):
+
+ try:
+ self.type = T57
+
+ # C.g:59:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
+ # C.g:59:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
+ self.match("GLOBAL_REMOVE_IF_UNREFERENCED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T57
+
+
+
+ # $ANTLR start T58
+ def mT58(self, ):
+
+ try:
+ self.type = T58
+
+ # C.g:60:5: ( 'EFIAPI' )
+ # C.g:60:7: 'EFIAPI'
+ self.match("EFIAPI")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T58
+
+
+
+ # $ANTLR start T59
+ def mT59(self, ):
+
+ try:
+ self.type = T59
+
+ # C.g:61:5: ( 'EFI_BOOTSERVICE' )
+ # C.g:61:7: 'EFI_BOOTSERVICE'
+ self.match("EFI_BOOTSERVICE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T59
+
+
+
+ # $ANTLR start T60
+ def mT60(self, ):
+
+ try:
+ self.type = T60
+
+ # C.g:62:5: ( 'EFI_RUNTIMESERVICE' )
+ # C.g:62:7: 'EFI_RUNTIMESERVICE'
+ self.match("EFI_RUNTIMESERVICE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T60
+
+
+
+ # $ANTLR start T61
+ def mT61(self, ):
+
+ try:
+ self.type = T61
+
+ # C.g:63:5: ( 'PACKED' )
+ # C.g:63:7: 'PACKED'
+ self.match("PACKED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T61
+
+
+
+ # $ANTLR start T62
+ def mT62(self, ):
+
+ try:
+ self.type = T62
+
+ # C.g:64:5: ( '(' )
+ # C.g:64:7: '('
+ self.match(u'(')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T62
+
+
+
+ # $ANTLR start T63
+ def mT63(self, ):
+
+ try:
+ self.type = T63
+
+ # C.g:65:5: ( ')' )
+ # C.g:65:7: ')'
+ self.match(u')')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T63
+
+
+
+ # $ANTLR start T64
+ def mT64(self, ):
+
+ try:
+ self.type = T64
+
+ # C.g:66:5: ( '[' )
+ # C.g:66:7: '['
+ self.match(u'[')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T64
+
+
+
+ # $ANTLR start T65
+ def mT65(self, ):
+
+ try:
+ self.type = T65
+
+ # C.g:67:5: ( ']' )
+ # C.g:67:7: ']'
+ self.match(u']')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T65
+
+
+
+ # $ANTLR start T66
+ def mT66(self, ):
+
+ try:
+ self.type = T66
+
+ # C.g:68:5: ( '*' )
+ # C.g:68:7: '*'
+ self.match(u'*')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T66
+
+
+
+ # $ANTLR start T67
+ def mT67(self, ):
+
+ try:
+ self.type = T67
+
+ # C.g:69:5: ( '...' )
+ # C.g:69:7: '...'
+ self.match("...")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T67
+
+
+
+ # $ANTLR start T68
+ def mT68(self, ):
+
+ try:
+ self.type = T68
+
+ # C.g:70:5: ( '+' )
+ # C.g:70:7: '+'
+ self.match(u'+')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T68
+
+
+
+ # $ANTLR start T69
+ def mT69(self, ):
+
+ try:
+ self.type = T69
+
+ # C.g:71:5: ( '-' )
+ # C.g:71:7: '-'
+ self.match(u'-')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T69
+
+
+
+ # $ANTLR start T70
+ def mT70(self, ):
+
+ try:
+ self.type = T70
+
+ # C.g:72:5: ( '/' )
+ # C.g:72:7: '/'
+ self.match(u'/')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T70
+
+
+
+ # $ANTLR start T71
+ def mT71(self, ):
+
+ try:
+ self.type = T71
+
+ # C.g:73:5: ( '%' )
+ # C.g:73:7: '%'
+ self.match(u'%')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T71
+
+
+
+ # $ANTLR start T72
+ def mT72(self, ):
+
+ try:
+ self.type = T72
+
+ # C.g:74:5: ( '++' )
+ # C.g:74:7: '++'
+ self.match("++")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T72
+
+
+
+ # $ANTLR start T73
+ def mT73(self, ):
+
+ try:
+ self.type = T73
+
+ # C.g:75:5: ( '--' )
+ # C.g:75:7: '--'
+ self.match("--")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T73
+
+
+
+ # $ANTLR start T74
+ def mT74(self, ):
+
+ try:
+ self.type = T74
+
+ # C.g:76:5: ( 'sizeof' )
+ # C.g:76:7: 'sizeof'
+ self.match("sizeof")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T74
+
+
+
+ # $ANTLR start T75
+ def mT75(self, ):
+
+ try:
+ self.type = T75
+
+ # C.g:77:5: ( '.' )
+ # C.g:77:7: '.'
+ self.match(u'.')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T75
+
+
+
+ # $ANTLR start T76
+ def mT76(self, ):
+
+ try:
+ self.type = T76
+
+ # C.g:78:5: ( '->' )
+ # C.g:78:7: '->'
+ self.match("->")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T76
+
+
+
+ # $ANTLR start T77
+ def mT77(self, ):
+
+ try:
+ self.type = T77
+
+ # C.g:79:5: ( '&' )
+ # C.g:79:7: '&'
+ self.match(u'&')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T77
+
+
+
+ # $ANTLR start T78
+ def mT78(self, ):
+
+ try:
+ self.type = T78
+
+ # C.g:80:5: ( '~' )
+ # C.g:80:7: '~'
+ self.match(u'~')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T78
+
+
+
+ # $ANTLR start T79
+ def mT79(self, ):
+
+ try:
+ self.type = T79
+
+ # C.g:81:5: ( '!' )
+ # C.g:81:7: '!'
+ self.match(u'!')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T79
+
+
+
+ # $ANTLR start T80
+ def mT80(self, ):
+
+ try:
+ self.type = T80
+
+ # C.g:82:5: ( '*=' )
+ # C.g:82:7: '*='
+ self.match("*=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T80
+
+
+
+ # $ANTLR start T81
+ def mT81(self, ):
+
+ try:
+ self.type = T81
+
+ # C.g:83:5: ( '/=' )
+ # C.g:83:7: '/='
+ self.match("/=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T81
+
+
+
+ # $ANTLR start T82
+ def mT82(self, ):
+
+ try:
+ self.type = T82
+
+ # C.g:84:5: ( '%=' )
+ # C.g:84:7: '%='
+ self.match("%=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T82
+
+
+
+ # $ANTLR start T83
+ def mT83(self, ):
+
+ try:
+ self.type = T83
+
+ # C.g:85:5: ( '+=' )
+ # C.g:85:7: '+='
+ self.match("+=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T83
+
+
+
+ # $ANTLR start T84
+ def mT84(self, ):
+
+ try:
+ self.type = T84
+
+ # C.g:86:5: ( '-=' )
+ # C.g:86:7: '-='
+ self.match("-=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T84
+
+
+
+ # $ANTLR start T85
+ def mT85(self, ):
+
+ try:
+ self.type = T85
+
+ # C.g:87:5: ( '<<=' )
+ # C.g:87:7: '<<='
+ self.match("<<=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T85
+
+
+
+ # $ANTLR start T86
+ def mT86(self, ):
+
+ try:
+ self.type = T86
+
+ # C.g:88:5: ( '>>=' )
+ # C.g:88:7: '>>='
+ self.match(">>=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T86
+
+
+
+ # $ANTLR start T87
+ def mT87(self, ):
+
+ try:
+ self.type = T87
+
+ # C.g:89:5: ( '&=' )
+ # C.g:89:7: '&='
+ self.match("&=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T87
+
+
+
+ # $ANTLR start T88
+ def mT88(self, ):
+
+ try:
+ self.type = T88
+
+ # C.g:90:5: ( '^=' )
+ # C.g:90:7: '^='
+ self.match("^=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T88
+
+
+
+ # $ANTLR start T89
+ def mT89(self, ):
+
+ try:
+ self.type = T89
+
+ # C.g:91:5: ( '|=' )
+ # C.g:91:7: '|='
+ self.match("|=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T89
+
+
+
+ # $ANTLR start T90
+ def mT90(self, ):
+
+ try:
+ self.type = T90
+
+ # C.g:92:5: ( '?' )
+ # C.g:92:7: '?'
+ self.match(u'?')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T90
+
+
+
+ # $ANTLR start T91
+ def mT91(self, ):
+
+ try:
+ self.type = T91
+
+ # C.g:93:5: ( '||' )
+ # C.g:93:7: '||'
+ self.match("||")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T91
+
+
+
+ # $ANTLR start T92
+ def mT92(self, ):
+
+ try:
+ self.type = T92
+
+ # C.g:94:5: ( '&&' )
+ # C.g:94:7: '&&'
+ self.match("&&")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T92
+
+
+
+ # $ANTLR start T93
+ def mT93(self, ):
+
+ try:
+ self.type = T93
+
+ # C.g:95:5: ( '|' )
+ # C.g:95:7: '|'
+ self.match(u'|')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T93
+
+
+
+ # $ANTLR start T94
+ def mT94(self, ):
+
+ try:
+ self.type = T94
+
+ # C.g:96:5: ( '^' )
+ # C.g:96:7: '^'
+ self.match(u'^')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T94
+
+
+
+ # $ANTLR start T95
+ def mT95(self, ):
+
+ try:
+ self.type = T95
+
+ # C.g:97:5: ( '==' )
+ # C.g:97:7: '=='
+ self.match("==")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T95
+
+
+
+ # $ANTLR start T96
+ def mT96(self, ):
+
+ try:
+ self.type = T96
+
+ # C.g:98:5: ( '!=' )
+ # C.g:98:7: '!='
+ self.match("!=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T96
+
+
+
+ # $ANTLR start T97
+ def mT97(self, ):
+
+ try:
+ self.type = T97
+
+ # C.g:99:5: ( '<' )
+ # C.g:99:7: '<'
+ self.match(u'<')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T97
+
+
+
+ # $ANTLR start T98
+ def mT98(self, ):
+
+ try:
+ self.type = T98
+
+ # C.g:100:5: ( '>' )
+ # C.g:100:7: '>'
+ self.match(u'>')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T98
+
+
+
+ # $ANTLR start T99
+ def mT99(self, ):
+
+ try:
+ self.type = T99
+
+ # C.g:101:5: ( '<=' )
+ # C.g:101:7: '<='
+ self.match("<=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T99
+
+
+
+ # $ANTLR start T100
+ def mT100(self, ):
+
+ try:
+ self.type = T100
+
+ # C.g:102:6: ( '>=' )
+ # C.g:102:8: '>='
+ self.match(">=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T100
+
+
+
+ # $ANTLR start T101
+ def mT101(self, ):
+
+ try:
+ self.type = T101
+
+ # C.g:103:6: ( '<<' )
+ # C.g:103:8: '<<'
+ self.match("<<")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T101
+
+
+
+ # $ANTLR start T102
+ def mT102(self, ):
+
+ try:
+ self.type = T102
+
+ # C.g:104:6: ( '>>' )
+ # C.g:104:8: '>>'
+ self.match(">>")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T102
+
+
+
+ # $ANTLR start T103
+ def mT103(self, ):
+
+ try:
+ self.type = T103
+
+ # C.g:105:6: ( '__asm__' )
+ # C.g:105:8: '__asm__'
+ self.match("__asm__")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T103
+
+
+
+ # $ANTLR start T104
+ def mT104(self, ):
+
+ try:
+ self.type = T104
+
+ # C.g:106:6: ( '_asm' )
+ # C.g:106:8: '_asm'
+ self.match("_asm")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T104
+
+
+
+ # $ANTLR start T105
+ def mT105(self, ):
+
+ try:
+ self.type = T105
+
+ # C.g:107:6: ( '__asm' )
+ # C.g:107:8: '__asm'
+ self.match("__asm")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T105
+
+
+
+ # $ANTLR start T106
+ def mT106(self, ):
+
+ try:
+ self.type = T106
+
+ # C.g:108:6: ( 'case' )
+ # C.g:108:8: 'case'
+ self.match("case")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T106
+
+
+
+ # $ANTLR start T107
+ def mT107(self, ):
+
+ try:
+ self.type = T107
+
+ # C.g:109:6: ( 'default' )
+ # C.g:109:8: 'default'
+ self.match("default")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T107
+
+
+
+ # $ANTLR start T108
+ def mT108(self, ):
+
+ try:
+ self.type = T108
+
+ # C.g:110:6: ( 'if' )
+ # C.g:110:8: 'if'
+ self.match("if")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T108
+
+
+
+ # $ANTLR start T109
+ def mT109(self, ):
+
+ try:
+ self.type = T109
+
+ # C.g:111:6: ( 'else' )
+ # C.g:111:8: 'else'
+ self.match("else")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T109
+
+
+
+ # $ANTLR start T110
+ def mT110(self, ):
+
+ try:
+ self.type = T110
+
+ # C.g:112:6: ( 'switch' )
+ # C.g:112:8: 'switch'
+ self.match("switch")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T110
+
+
+
+ # $ANTLR start T111
+ def mT111(self, ):
+
+ try:
+ self.type = T111
+
+ # C.g:113:6: ( 'while' )
+ # C.g:113:8: 'while'
+ self.match("while")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T111
+
+
+
+ # $ANTLR start T112
+ def mT112(self, ):
+
+ try:
+ self.type = T112
+
+ # C.g:114:6: ( 'do' )
+ # C.g:114:8: 'do'
+ self.match("do")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T112
+
+
+
+ # $ANTLR start T113
+ def mT113(self, ):
+
+ try:
+ self.type = T113
+
+ # C.g:115:6: ( 'for' )
+ # C.g:115:8: 'for'
+ self.match("for")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T113
+
+
+
+ # $ANTLR start T114
+ def mT114(self, ):
+
+ try:
+ self.type = T114
+
+ # C.g:116:6: ( 'goto' )
+ # C.g:116:8: 'goto'
+ self.match("goto")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T114
+
+
+
+ # $ANTLR start T115
+ def mT115(self, ):
+
+ try:
+ self.type = T115
+
+ # C.g:117:6: ( 'continue' )
+ # C.g:117:8: 'continue'
+ self.match("continue")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T115
+
+
+
+ # $ANTLR start T116
+ def mT116(self, ):
+
+ try:
+ self.type = T116
+
+ # C.g:118:6: ( 'break' )
+ # C.g:118:8: 'break'
+ self.match("break")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T116
+
+
+
+ # $ANTLR start T117
+ def mT117(self, ):
+
+ try:
+ self.type = T117
+
+ # C.g:119:6: ( 'return' )
+ # C.g:119:8: 'return'
+ self.match("return")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T117
+
+
+
+ # $ANTLR start IDENTIFIER
+ def mIDENTIFIER(self, ):
+
+ try:
+ self.type = IDENTIFIER
+
+ # C.g:586:2: ( LETTER ( LETTER | '0' .. '9' )* )
+ # C.g:586:4: LETTER ( LETTER | '0' .. '9' )*
+ self.mLETTER()
+
+ # C.g:586:11: ( LETTER | '0' .. '9' )*
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == u'$' or (u'0' <= LA1_0 <= u'9') or (u'A' <= LA1_0 <= u'Z') or LA1_0 == u'_' or (u'a' <= LA1_0 <= u'z')) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # C.g:
+ if self.input.LA(1) == u'$' or (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
+                        self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop1
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end IDENTIFIER
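+    # NOTE (editorial): IDENTIFIER implements LETTER ( LETTER | '0'..'9' )*.
+    # The generated loop peeks one character with self.input.LA(1) and
+    # consumes it on a match; '$' counts as a letter because the grammar's
+    # LETTER fragment includes it, so "$foo1" lexes as a single IDENTIFIER.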
+
+
+
+ # $ANTLR start LETTER
+ def mLETTER(self, ):
+
+ try:
+ # C.g:591:2: ( '$' | 'A' .. 'Z' | 'a' .. 'z' | '_' )
+ # C.g:
+ if self.input.LA(1) == u'$' or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
+                self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end LETTER
+
+
+
+ # $ANTLR start CHARACTER_LITERAL
+ def mCHARACTER_LITERAL(self, ):
+
+ try:
+ self.type = CHARACTER_LITERAL
+
+ # C.g:598:5: ( ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\'' )
+ # C.g:598:9: ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\''
+ # C.g:598:9: ( 'L' )?
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == u'L') :
+ alt2 = 1
+ if alt2 == 1:
+ # C.g:598:10: 'L'
+ self.match(u'L')
+
+
+
+
+ self.match(u'\'')
+
+ # C.g:598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == u'\\') :
+ alt3 = 1
+ elif ((u'\u0000' <= LA3_0 <= u'&') or (u'(' <= LA3_0 <= u'[') or (u']' <= LA3_0 <= u'\uFFFE')) :
+ alt3 = 2
+ else:
+ nvae = NoViableAltException("598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )", 3, 0, self.input)
+
+ raise nvae
+
+ if alt3 == 1:
+ # C.g:598:23: EscapeSequence
+ self.mEscapeSequence()
+
+
+
+ elif alt3 == 2:
+ # C.g:598:40: ~ ( '\\'' | '\\\\' )
+ if (u'\u0000' <= self.input.LA(1) <= u'&') or (u'(' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'):
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+ self.match(u'\'')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end CHARACTER_LITERAL
+
+
+
+ # $ANTLR start STRING_LITERAL
+ def mSTRING_LITERAL(self, ):
+
+ try:
+ self.type = STRING_LITERAL
+
+ # C.g:602:5: ( ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"' )
+ # C.g:602:8: ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"'
+ # C.g:602:8: ( 'L' )?
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == u'L') :
+ alt4 = 1
+ if alt4 == 1:
+ # C.g:602:9: 'L'
+ self.match(u'L')
+
+
+
+
+ self.match(u'"')
+
+ # C.g:602:19: ( EscapeSequence | ~ ( '\\\\' | '\"' ) )*
+ while True: #loop5
+ alt5 = 3
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == u'\\') :
+ alt5 = 1
+ elif ((u'\u0000' <= LA5_0 <= u'!') or (u'#' <= LA5_0 <= u'[') or (u']' <= LA5_0 <= u'\uFFFE')) :
+ alt5 = 2
+
+
+ if alt5 == 1:
+ # C.g:602:21: EscapeSequence
+ self.mEscapeSequence()
+
+
+
+ elif alt5 == 2:
+ # C.g:602:38: ~ ( '\\\\' | '\"' )
+ if (u'\u0000' <= self.input.LA(1) <= u'!') or (u'#' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'):
+                        self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop5
+
+
+ self.match(u'"')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end STRING_LITERAL
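+    # NOTE (editorial): CHARACTER_LITERAL and STRING_LITERAL both take an
+    # optional 'L' (wide) prefix and delegate backslash forms to
+    # EscapeSequence. The negated character sets stop at u'\uFFFE';
+    # u'\uFFFF' is left out of the vocabulary, apparently because the
+    # ANTLR 3 runtime reserves it as an end-of-file sentinel.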
+
+
+
+ # $ANTLR start HEX_LITERAL
+ def mHEX_LITERAL(self, ):
+
+ try:
+ self.type = HEX_LITERAL
+
+ # C.g:605:13: ( '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )? )
+ # C.g:605:15: '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )?
+ self.match(u'0')
+
+ if self.input.LA(1) == u'X' or self.input.LA(1) == u'x':
+                self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ # C.g:605:29: ( HexDigit )+
+ cnt6 = 0
+ while True: #loop6
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if ((u'0' <= LA6_0 <= u'9') or (u'A' <= LA6_0 <= u'F') or (u'a' <= LA6_0 <= u'f')) :
+ alt6 = 1
+
+
+ if alt6 == 1:
+ # C.g:605:29: HexDigit
+ self.mHexDigit()
+
+
+
+ else:
+ if cnt6 >= 1:
+ break #loop6
+
+ eee = EarlyExitException(6, self.input)
+ raise eee
+
+ cnt6 += 1
+
+
+ # C.g:605:39: ( IntegerTypeSuffix )?
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if (LA7_0 == u'L' or LA7_0 == u'U' or LA7_0 == u'l' or LA7_0 == u'u') :
+ alt7 = 1
+ if alt7 == 1:
+ # C.g:605:39: IntegerTypeSuffix
+ self.mIntegerTypeSuffix()
+
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end HEX_LITERAL
+
+
+
+ # $ANTLR start DECIMAL_LITERAL
+ def mDECIMAL_LITERAL(self, ):
+
+ try:
+ self.type = DECIMAL_LITERAL
+
+ # C.g:607:17: ( ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )? )
+ # C.g:607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )?
+ # C.g:607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == u'0') :
+ alt9 = 1
+ elif ((u'1' <= LA9_0 <= u'9')) :
+ alt9 = 2
+ else:
+ nvae = NoViableAltException("607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )", 9, 0, self.input)
+
+ raise nvae
+
+ if alt9 == 1:
+ # C.g:607:20: '0'
+ self.match(u'0')
+
+
+
+ elif alt9 == 2:
+ # C.g:607:26: '1' .. '9' ( '0' .. '9' )*
+ self.matchRange(u'1', u'9')
+
+ # C.g:607:35: ( '0' .. '9' )*
+ while True: #loop8
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if ((u'0' <= LA8_0 <= u'9')) :
+ alt8 = 1
+
+
+ if alt8 == 1:
+ # C.g:607:35: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ break #loop8
+
+
+
+
+
+ # C.g:607:46: ( IntegerTypeSuffix )?
+ alt10 = 2
+ LA10_0 = self.input.LA(1)
+
+ if (LA10_0 == u'L' or LA10_0 == u'U' or LA10_0 == u'l' or LA10_0 == u'u') :
+ alt10 = 1
+ if alt10 == 1:
+ # C.g:607:46: IntegerTypeSuffix
+ self.mIntegerTypeSuffix()
+
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end DECIMAL_LITERAL
+
+
+
+ # $ANTLR start OCTAL_LITERAL
+ def mOCTAL_LITERAL(self, ):
+
+ try:
+ self.type = OCTAL_LITERAL
+
+ # C.g:609:15: ( '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? )
+ # C.g:609:17: '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )?
+ self.match(u'0')
+
+ # C.g:609:21: ( '0' .. '7' )+
+ cnt11 = 0
+ while True: #loop11
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if ((u'0' <= LA11_0 <= u'7')) :
+ alt11 = 1
+
+
+ if alt11 == 1:
+ # C.g:609:22: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+ else:
+ if cnt11 >= 1:
+ break #loop11
+
+ eee = EarlyExitException(11, self.input)
+ raise eee
+
+ cnt11 += 1
+
+
+ # C.g:609:33: ( IntegerTypeSuffix )?
+ alt12 = 2
+ LA12_0 = self.input.LA(1)
+
+ if (LA12_0 == u'L' or LA12_0 == u'U' or LA12_0 == u'l' or LA12_0 == u'u') :
+ alt12 = 1
+ if alt12 == 1:
+ # C.g:609:33: IntegerTypeSuffix
+ self.mIntegerTypeSuffix()
+
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end OCTAL_LITERAL
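+    # NOTE (editorial): HEX_LITERAL, DECIMAL_LITERAL and OCTAL_LITERAL all
+    # begin with '0', so the choice between them is not made here but by the
+    # dispatch DFA in mTokens (DFA #35). Roughly: "0x1F" lexes as
+    # HEX_LITERAL, "077" as OCTAL_LITERAL, and a lone "0" or "123" as
+    # DECIMAL_LITERAL, each with an optional u/U/l/L suffix.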
+
+
+
+ # $ANTLR start HexDigit
+ def mHexDigit(self, ):
+
+ try:
+ # C.g:612:10: ( ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) )
+ # C.g:612:12: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )
+ if (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'F') or (u'a' <= self.input.LA(1) <= u'f'):
+                self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end HexDigit
+
+
+
+ # $ANTLR start IntegerTypeSuffix
+ def mIntegerTypeSuffix(self, ):
+
+ try:
+ # C.g:616:2: ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) )
+ alt13 = 4
+ LA13_0 = self.input.LA(1)
+
+ if (LA13_0 == u'U' or LA13_0 == u'u') :
+ LA13_1 = self.input.LA(2)
+
+ if (LA13_1 == u'L' or LA13_1 == u'l') :
+ LA13_3 = self.input.LA(3)
+
+ if (LA13_3 == u'L' or LA13_3 == u'l') :
+ alt13 = 4
+ else:
+ alt13 = 3
+ else:
+ alt13 = 1
+ elif (LA13_0 == u'L' or LA13_0 == u'l') :
+ alt13 = 2
+ else:
+ nvae = NoViableAltException("614:1: fragment IntegerTypeSuffix : ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) );", 13, 0, self.input)
+
+ raise nvae
+
+ if alt13 == 1:
+ # C.g:616:4: ( 'u' | 'U' )
+ if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt13 == 2:
+ # C.g:617:4: ( 'l' | 'L' )
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt13 == 3:
+ # C.g:618:4: ( 'u' | 'U' ) ( 'l' | 'L' )
+ if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt13 == 4:
+ # C.g:619:4: ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' )
+ if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end IntegerTypeSuffix
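+    # NOTE (editorial): IntegerTypeSuffix separates the four C suffix shapes
+    # (u, l, ul, ull, case-insensitive) with up to three characters of
+    # lookahead (LA(1)..LA(3)), so the longest valid suffix wins: "1ull"
+    # consumes all three letters rather than stopping after the 'u'.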
+
+
+
+ # $ANTLR start FLOATING_POINT_LITERAL
+ def mFLOATING_POINT_LITERAL(self, ):
+
+ try:
+ self.type = FLOATING_POINT_LITERAL
+
+ # C.g:623:5: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )? | '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )? | ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )? | ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix )
+ alt25 = 4
+ alt25 = self.dfa25.predict(self.input)
+ if alt25 == 1:
+ # C.g:623:9: ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )?
+ # C.g:623:9: ( '0' .. '9' )+
+ cnt14 = 0
+ while True: #loop14
+ alt14 = 2
+ LA14_0 = self.input.LA(1)
+
+ if ((u'0' <= LA14_0 <= u'9')) :
+ alt14 = 1
+
+
+ if alt14 == 1:
+ # C.g:623:10: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt14 >= 1:
+ break #loop14
+
+ eee = EarlyExitException(14, self.input)
+ raise eee
+
+ cnt14 += 1
+
+
+ self.match(u'.')
+
+ # C.g:623:25: ( '0' .. '9' )*
+ while True: #loop15
+ alt15 = 2
+ LA15_0 = self.input.LA(1)
+
+ if ((u'0' <= LA15_0 <= u'9')) :
+ alt15 = 1
+
+
+ if alt15 == 1:
+ # C.g:623:26: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ break #loop15
+
+
+ # C.g:623:37: ( Exponent )?
+ alt16 = 2
+ LA16_0 = self.input.LA(1)
+
+ if (LA16_0 == u'E' or LA16_0 == u'e') :
+ alt16 = 1
+ if alt16 == 1:
+ # C.g:623:37: Exponent
+ self.mExponent()
+
+
+
+
+ # C.g:623:47: ( FloatTypeSuffix )?
+ alt17 = 2
+ LA17_0 = self.input.LA(1)
+
+ if (LA17_0 == u'D' or LA17_0 == u'F' or LA17_0 == u'd' or LA17_0 == u'f') :
+ alt17 = 1
+ if alt17 == 1:
+ # C.g:623:47: FloatTypeSuffix
+ self.mFloatTypeSuffix()
+
+
+
+
+
+
+ elif alt25 == 2:
+ # C.g:624:9: '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )?
+ self.match(u'.')
+
+ # C.g:624:13: ( '0' .. '9' )+
+ cnt18 = 0
+ while True: #loop18
+ alt18 = 2
+ LA18_0 = self.input.LA(1)
+
+ if ((u'0' <= LA18_0 <= u'9')) :
+ alt18 = 1
+
+
+ if alt18 == 1:
+ # C.g:624:14: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt18 >= 1:
+ break #loop18
+
+ eee = EarlyExitException(18, self.input)
+ raise eee
+
+ cnt18 += 1
+
+
+ # C.g:624:25: ( Exponent )?
+ alt19 = 2
+ LA19_0 = self.input.LA(1)
+
+ if (LA19_0 == u'E' or LA19_0 == u'e') :
+ alt19 = 1
+ if alt19 == 1:
+ # C.g:624:25: Exponent
+ self.mExponent()
+
+
+
+
+ # C.g:624:35: ( FloatTypeSuffix )?
+ alt20 = 2
+ LA20_0 = self.input.LA(1)
+
+ if (LA20_0 == u'D' or LA20_0 == u'F' or LA20_0 == u'd' or LA20_0 == u'f') :
+ alt20 = 1
+ if alt20 == 1:
+ # C.g:624:35: FloatTypeSuffix
+ self.mFloatTypeSuffix()
+
+
+
+
+
+
+ elif alt25 == 3:
+ # C.g:625:9: ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )?
+ # C.g:625:9: ( '0' .. '9' )+
+ cnt21 = 0
+ while True: #loop21
+ alt21 = 2
+ LA21_0 = self.input.LA(1)
+
+ if ((u'0' <= LA21_0 <= u'9')) :
+ alt21 = 1
+
+
+ if alt21 == 1:
+ # C.g:625:10: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt21 >= 1:
+ break #loop21
+
+ eee = EarlyExitException(21, self.input)
+ raise eee
+
+ cnt21 += 1
+
+
+ self.mExponent()
+
+ # C.g:625:30: ( FloatTypeSuffix )?
+ alt22 = 2
+ LA22_0 = self.input.LA(1)
+
+ if (LA22_0 == u'D' or LA22_0 == u'F' or LA22_0 == u'd' or LA22_0 == u'f') :
+ alt22 = 1
+ if alt22 == 1:
+ # C.g:625:30: FloatTypeSuffix
+ self.mFloatTypeSuffix()
+
+
+
+
+
+
+ elif alt25 == 4:
+ # C.g:626:9: ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix
+ # C.g:626:9: ( '0' .. '9' )+
+ cnt23 = 0
+ while True: #loop23
+ alt23 = 2
+ LA23_0 = self.input.LA(1)
+
+ if ((u'0' <= LA23_0 <= u'9')) :
+ alt23 = 1
+
+
+ if alt23 == 1:
+ # C.g:626:10: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt23 >= 1:
+ break #loop23
+
+ eee = EarlyExitException(23, self.input)
+ raise eee
+
+ cnt23 += 1
+
+
+ # C.g:626:21: ( Exponent )?
+ alt24 = 2
+ LA24_0 = self.input.LA(1)
+
+ if (LA24_0 == u'E' or LA24_0 == u'e') :
+ alt24 = 1
+ if alt24 == 1:
+ # C.g:626:21: Exponent
+ self.mExponent()
+
+
+
+
+ self.mFloatTypeSuffix()
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end FLOATING_POINT_LITERAL
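+    # NOTE (editorial): the four FLOATING_POINT_LITERAL alternatives overlap
+    # on their leading digits, so alternative selection is delegated to the
+    # precomputed DFA #25 (tables near the end of this file). Illustrative
+    # inputs: "1.5e3f" -> alt 1, ".25" -> alt 2, "1e9" -> alt 3, "1f" -> alt 4.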
+
+
+
+ # $ANTLR start Exponent
+ def mExponent(self, ):
+
+ try:
+ # C.g:630:10: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )
+ # C.g:630:12: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+
+ if self.input.LA(1) == u'E' or self.input.LA(1) == u'e':
+                self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ # C.g:630:22: ( '+' | '-' )?
+ alt26 = 2
+ LA26_0 = self.input.LA(1)
+
+ if (LA26_0 == u'+' or LA26_0 == u'-') :
+ alt26 = 1
+ if alt26 == 1:
+ # C.g:
+ if self.input.LA(1) == u'+' or self.input.LA(1) == u'-':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+ # C.g:630:33: ( '0' .. '9' )+
+ cnt27 = 0
+ while True: #loop27
+ alt27 = 2
+ LA27_0 = self.input.LA(1)
+
+ if ((u'0' <= LA27_0 <= u'9')) :
+ alt27 = 1
+
+
+ if alt27 == 1:
+ # C.g:630:34: '0' .. '9'
+ self.matchRange(u'0', u'9')
+
+
+
+ else:
+ if cnt27 >= 1:
+ break #loop27
+
+ eee = EarlyExitException(27, self.input)
+ raise eee
+
+ cnt27 += 1
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end Exponent
+
+
+
+ # $ANTLR start FloatTypeSuffix
+ def mFloatTypeSuffix(self, ):
+
+ try:
+ # C.g:633:17: ( ( 'f' | 'F' | 'd' | 'D' ) )
+ # C.g:633:19: ( 'f' | 'F' | 'd' | 'D' )
+ if self.input.LA(1) == u'D' or self.input.LA(1) == u'F' or self.input.LA(1) == u'd' or self.input.LA(1) == u'f':
+                self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end FloatTypeSuffix
+
+
+
+ # $ANTLR start EscapeSequence
+ def mEscapeSequence(self, ):
+
+ try:
+ # C.g:637:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape )
+ alt28 = 2
+ LA28_0 = self.input.LA(1)
+
+ if (LA28_0 == u'\\') :
+ LA28_1 = self.input.LA(2)
+
+ if (LA28_1 == u'"' or LA28_1 == u'\'' or LA28_1 == u'\\' or LA28_1 == u'b' or LA28_1 == u'f' or LA28_1 == u'n' or LA28_1 == u'r' or LA28_1 == u't') :
+ alt28 = 1
+ elif ((u'0' <= LA28_1 <= u'7')) :
+ alt28 = 2
+ else:
+ nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 1, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 0, self.input)
+
+ raise nvae
+
+ if alt28 == 1:
+ # C.g:637:8: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' )
+ self.match(u'\\')
+
+ if self.input.LA(1) == u'"' or self.input.LA(1) == u'\'' or self.input.LA(1) == u'\\' or self.input.LA(1) == u'b' or self.input.LA(1) == u'f' or self.input.LA(1) == u'n' or self.input.LA(1) == u'r' or self.input.LA(1) == u't':
+                    self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ elif alt28 == 2:
+ # C.g:638:9: OctalEscape
+ self.mOctalEscape()
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end EscapeSequence
+
+
+
+ # $ANTLR start OctalEscape
+ def mOctalEscape(self, ):
+
+ try:
+ # C.g:643:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) )
+ alt29 = 3
+ LA29_0 = self.input.LA(1)
+
+ if (LA29_0 == u'\\') :
+ LA29_1 = self.input.LA(2)
+
+ if ((u'0' <= LA29_1 <= u'3')) :
+ LA29_2 = self.input.LA(3)
+
+ if ((u'0' <= LA29_2 <= u'7')) :
+ LA29_4 = self.input.LA(4)
+
+ if ((u'0' <= LA29_4 <= u'7')) :
+ alt29 = 1
+ else:
+ alt29 = 2
+ else:
+ alt29 = 3
+ elif ((u'4' <= LA29_1 <= u'7')) :
+ LA29_3 = self.input.LA(3)
+
+ if ((u'0' <= LA29_3 <= u'7')) :
+ alt29 = 2
+ else:
+ alt29 = 3
+ else:
+ nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 1, self.input)
+
+ raise nvae
+
+ else:
+ nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 0, self.input)
+
+ raise nvae
+
+ if alt29 == 1:
+ # C.g:643:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )
+ self.match(u'\\')
+
+ # C.g:643:14: ( '0' .. '3' )
+ # C.g:643:15: '0' .. '3'
+ self.matchRange(u'0', u'3')
+
+
+
+
+ # C.g:643:25: ( '0' .. '7' )
+ # C.g:643:26: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+ # C.g:643:36: ( '0' .. '7' )
+ # C.g:643:37: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+
+
+ elif alt29 == 2:
+ # C.g:644:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' )
+ self.match(u'\\')
+
+ # C.g:644:14: ( '0' .. '7' )
+ # C.g:644:15: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+ # C.g:644:25: ( '0' .. '7' )
+ # C.g:644:26: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+
+
+ elif alt29 == 3:
+ # C.g:645:9: '\\\\' ( '0' .. '7' )
+ self.match(u'\\')
+
+ # C.g:645:14: ( '0' .. '7' )
+ # C.g:645:15: '0' .. '7'
+ self.matchRange(u'0', u'7')
+
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end OctalEscape
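+    # NOTE (editorial): OctalEscape hand-rolls maximal munch over
+    # LA(2)..LA(4): a three-digit escape must start with '0'..'3' so its
+    # value fits in one byte, otherwise the two-digit and then one-digit
+    # forms apply. E.g. '\101' takes alt 1, '\47' alt 2 and '\7' alt 3.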
+
+
+
+ # $ANTLR start UnicodeEscape
+ def mUnicodeEscape(self, ):
+
+ try:
+ # C.g:650:5: ( '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit )
+ # C.g:650:9: '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit
+ self.match(u'\\')
+
+ self.match(u'u')
+
+ self.mHexDigit()
+
+ self.mHexDigit()
+
+ self.mHexDigit()
+
+ self.mHexDigit()
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end UnicodeEscape
+
+
+
+ # $ANTLR start WS
+ def mWS(self, ):
+
+ try:
+ self.type = WS
+
+ # C.g:653:5: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
+ # C.g:653:8: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
+ if (u'\t' <= self.input.LA(1) <= u'\n') or (u'\f' <= self.input.LA(1) <= u'\r') or self.input.LA(1) == u' ':
+                self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+ #action start
+            self.channel = HIDDEN
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end WS
+
+
+
+ # $ANTLR start BS
+ def mBS(self, ):
+
+ try:
+ self.type = BS
+
+ # C.g:657:5: ( ( '\\\\' ) )
+ # C.g:657:7: ( '\\\\' )
+ # C.g:657:7: ( '\\\\' )
+ # C.g:657:8: '\\\\'
+ self.match(u'\\')
+
+
+
+
+ #action start
+            self.channel = HIDDEN
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end BS
+
+
+
+ # $ANTLR start UnicodeVocabulary
+ def mUnicodeVocabulary(self, ):
+
+ try:
+ self.type = UnicodeVocabulary
+
+ # C.g:665:5: ( '\\u0003' .. '\\uFFFE' )
+ # C.g:665:7: '\\u0003' .. '\\uFFFE'
+ self.matchRange(u'\u0003', u'\uFFFE')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end UnicodeVocabulary
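+    # NOTE (editorial): UnicodeVocabulary matches any single character in
+    # '\u0003'..'\uFFFE'. mTokens falls back to it when no other rule
+    # applies, so the lexer emits a token for stray bytes instead of failing.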
+
+
+
+ # $ANTLR start COMMENT
+ def mCOMMENT(self, ):
+
+ try:
+ self.type = COMMENT
+
+ # C.g:668:5: ( '/*' ( options {greedy=false; } : . )* '*/' )
+ # C.g:668:9: '/*' ( options {greedy=false; } : . )* '*/'
+ self.match("/*")
+
+
+ # C.g:668:14: ( options {greedy=false; } : . )*
+ while True: #loop30
+ alt30 = 2
+ LA30_0 = self.input.LA(1)
+
+ if (LA30_0 == u'*') :
+ LA30_1 = self.input.LA(2)
+
+ if (LA30_1 == u'/') :
+ alt30 = 2
+ elif ((u'\u0000' <= LA30_1 <= u'.') or (u'0' <= LA30_1 <= u'\uFFFE')) :
+ alt30 = 1
+
+
+ elif ((u'\u0000' <= LA30_0 <= u')') or (u'+' <= LA30_0 <= u'\uFFFE')) :
+ alt30 = 1
+
+
+ if alt30 == 1:
+ # C.g:668:42: .
+ self.matchAny()
+
+
+
+ else:
+ break #loop30
+
+
+ self.match("*/")
+
+
+ #action start
+            self.channel = HIDDEN
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end COMMENT
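+    # NOTE (editorial): the grammar marks the comment body greedy=false; the
+    # generated loop emulates that by peeking two characters and exiting on
+    # '*' only when LA(2) is '/', so "/* a * b */" ends at the first "*/".
+    # The channel=HIDDEN action keeps comments out of the parser's stream.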
+
+
+
+ # $ANTLR start LINE_COMMENT
+ def mLINE_COMMENT(self, ):
+
+ try:
+ self.type = LINE_COMMENT
+
+ # C.g:673:5: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # C.g:673:7: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ self.match("//")
+
+
+ # C.g:673:12: (~ ( '\\n' | '\\r' ) )*
+ while True: #loop31
+ alt31 = 2
+ LA31_0 = self.input.LA(1)
+
+ if ((u'\u0000' <= LA31_0 <= u'\t') or (u'\u000B' <= LA31_0 <= u'\f') or (u'\u000E' <= LA31_0 <= u'\uFFFE')) :
+ alt31 = 1
+
+
+ if alt31 == 1:
+ # C.g:673:12: ~ ( '\\n' | '\\r' )
+ if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'):
+                        self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop31
+
+
+ # C.g:673:26: ( '\\r' )?
+ alt32 = 2
+ LA32_0 = self.input.LA(1)
+
+ if (LA32_0 == u'\r') :
+ alt32 = 1
+ if alt32 == 1:
+ # C.g:673:26: '\\r'
+ self.match(u'\r')
+
+
+
+
+ self.match(u'\n')
+
+ #action start
+            self.channel = HIDDEN
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end LINE_COMMENT
+
+
+
+ # $ANTLR start LINE_COMMAND
+ def mLINE_COMMAND(self, ):
+
+ try:
+ self.type = LINE_COMMAND
+
+ # C.g:678:5: ( '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # C.g:678:7: '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ self.match(u'#')
+
+ # C.g:678:11: (~ ( '\\n' | '\\r' ) )*
+ while True: #loop33
+ alt33 = 2
+ LA33_0 = self.input.LA(1)
+
+ if ((u'\u0000' <= LA33_0 <= u'\t') or (u'\u000B' <= LA33_0 <= u'\f') or (u'\u000E' <= LA33_0 <= u'\uFFFE')) :
+ alt33 = 1
+
+
+ if alt33 == 1:
+ # C.g:678:11: ~ ( '\\n' | '\\r' )
+ if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'):
+                        self.input.consume()
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop33
+
+
+ # C.g:678:25: ( '\\r' )?
+ alt34 = 2
+ LA34_0 = self.input.LA(1)
+
+ if (LA34_0 == u'\r') :
+ alt34 = 1
+ if alt34 == 1:
+ # C.g:678:25: '\\r'
+ self.match(u'\r')
+
+
+
+
+ self.match(u'\n')
+
+ #action start
+            self.channel = HIDDEN
+ #action end
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end LINE_COMMAND
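+    # NOTE (editorial): mTokens is the per-token entry point. The initial
+    # "alt35 = 106" is a generated placeholder, immediately overwritten by
+    # DFA #35's prediction, which picks one of the 106 rule methods above.
+    # A minimal usage sketch, assuming the antlr3 Python runtime and that
+    # this generated class is named CLexer (per the C.g grammar):
+    #
+    #   from antlr3 import ANTLRStringStream
+    #   lexer = CLexer(ANTLRStringStream('unsigned int x = 0x1FUL;'))
+    #   token = lexer.nextToken()   # dispatches through mTokens()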
+
+
+
+ def mTokens(self):
+ # C.g:1:8: ( T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | T33 | T34 | T35 | T36 | T37 | T38 | T39 | T40 | T41 | T42 | T43 | T44 | T45 | T46 | T47 | T48 | T49 | T50 | T51 | T52 | T53 | T54 | T55 | T56 | T57 | T58 | T59 | T60 | T61 | T62 | T63 | T64 | T65 | T66 | T67 | T68 | T69 | T70 | T71 | T72 | T73 | T74 | T75 | T76 | T77 | T78 | T79 | T80 | T81 | T82 | T83 | T84 | T85 | T86 | T87 | T88 | T89 | T90 | T91 | T92 | T93 | T94 | T95 | T96 | T97 | T98 | T99 | T100 | T101 | T102 | T103 | T104 | T105 | T106 | T107 | T108 | T109 | T110 | T111 | T112 | T113 | T114 | T115 | T116 | T117 | IDENTIFIER | CHARACTER_LITERAL | STRING_LITERAL | HEX_LITERAL | DECIMAL_LITERAL | OCTAL_LITERAL | FLOATING_POINT_LITERAL | WS | BS | UnicodeVocabulary | COMMENT | LINE_COMMENT | LINE_COMMAND )
+ alt35 = 106
+ alt35 = self.dfa35.predict(self.input)
+ if alt35 == 1:
+ # C.g:1:10: T25
+ self.mT25()
+
+
+
+ elif alt35 == 2:
+ # C.g:1:14: T26
+ self.mT26()
+
+
+
+ elif alt35 == 3:
+ # C.g:1:18: T27
+ self.mT27()
+
+
+
+ elif alt35 == 4:
+ # C.g:1:22: T28
+ self.mT28()
+
+
+
+ elif alt35 == 5:
+ # C.g:1:26: T29
+ self.mT29()
+
+
+
+ elif alt35 == 6:
+ # C.g:1:30: T30
+ self.mT30()
+
+
+
+ elif alt35 == 7:
+ # C.g:1:34: T31
+ self.mT31()
+
+
+
+ elif alt35 == 8:
+ # C.g:1:38: T32
+ self.mT32()
+
+
+
+ elif alt35 == 9:
+ # C.g:1:42: T33
+ self.mT33()
+
+
+
+ elif alt35 == 10:
+ # C.g:1:46: T34
+ self.mT34()
+
+
+
+ elif alt35 == 11:
+ # C.g:1:50: T35
+ self.mT35()
+
+
+
+ elif alt35 == 12:
+ # C.g:1:54: T36
+ self.mT36()
+
+
+
+ elif alt35 == 13:
+ # C.g:1:58: T37
+ self.mT37()
+
+
+
+ elif alt35 == 14:
+ # C.g:1:62: T38
+ self.mT38()
+
+
+
+ elif alt35 == 15:
+ # C.g:1:66: T39
+ self.mT39()
+
+
+
+ elif alt35 == 16:
+ # C.g:1:70: T40
+ self.mT40()
+
+
+
+ elif alt35 == 17:
+ # C.g:1:74: T41
+ self.mT41()
+
+
+
+ elif alt35 == 18:
+ # C.g:1:78: T42
+ self.mT42()
+
+
+
+ elif alt35 == 19:
+ # C.g:1:82: T43
+ self.mT43()
+
+
+
+ elif alt35 == 20:
+ # C.g:1:86: T44
+ self.mT44()
+
+
+
+ elif alt35 == 21:
+ # C.g:1:90: T45
+ self.mT45()
+
+
+
+ elif alt35 == 22:
+ # C.g:1:94: T46
+ self.mT46()
+
+
+
+ elif alt35 == 23:
+ # C.g:1:98: T47
+ self.mT47()
+
+
+
+ elif alt35 == 24:
+ # C.g:1:102: T48
+ self.mT48()
+
+
+
+ elif alt35 == 25:
+ # C.g:1:106: T49
+ self.mT49()
+
+
+
+ elif alt35 == 26:
+ # C.g:1:110: T50
+ self.mT50()
+
+
+
+ elif alt35 == 27:
+ # C.g:1:114: T51
+ self.mT51()
+
+
+
+ elif alt35 == 28:
+ # C.g:1:118: T52
+ self.mT52()
+
+
+
+ elif alt35 == 29:
+ # C.g:1:122: T53
+ self.mT53()
+
+
+
+ elif alt35 == 30:
+ # C.g:1:126: T54
+ self.mT54()
+
+
+
+ elif alt35 == 31:
+ # C.g:1:130: T55
+ self.mT55()
+
+
+
+ elif alt35 == 32:
+ # C.g:1:134: T56
+ self.mT56()
+
+
+
+ elif alt35 == 33:
+ # C.g:1:138: T57
+ self.mT57()
+
+
+
+ elif alt35 == 34:
+ # C.g:1:142: T58
+ self.mT58()
+
+
+
+ elif alt35 == 35:
+ # C.g:1:146: T59
+ self.mT59()
+
+
+
+ elif alt35 == 36:
+ # C.g:1:150: T60
+ self.mT60()
+
+
+
+ elif alt35 == 37:
+ # C.g:1:154: T61
+ self.mT61()
+
+
+
+ elif alt35 == 38:
+ # C.g:1:158: T62
+ self.mT62()
+
+
+
+ elif alt35 == 39:
+ # C.g:1:162: T63
+ self.mT63()
+
+
+
+ elif alt35 == 40:
+ # C.g:1:166: T64
+ self.mT64()
+
+
+
+ elif alt35 == 41:
+ # C.g:1:170: T65
+ self.mT65()
+
+
+
+ elif alt35 == 42:
+ # C.g:1:174: T66
+ self.mT66()
+
+
+
+ elif alt35 == 43:
+ # C.g:1:178: T67
+ self.mT67()
+
+
+
+ elif alt35 == 44:
+ # C.g:1:182: T68
+ self.mT68()
+
+
+
+ elif alt35 == 45:
+ # C.g:1:186: T69
+ self.mT69()
+
+
+
+ elif alt35 == 46:
+ # C.g:1:190: T70
+ self.mT70()
+
+
+
+ elif alt35 == 47:
+ # C.g:1:194: T71
+ self.mT71()
+
+
+
+ elif alt35 == 48:
+ # C.g:1:198: T72
+ self.mT72()
+
+
+
+ elif alt35 == 49:
+ # C.g:1:202: T73
+ self.mT73()
+
+
+
+ elif alt35 == 50:
+ # C.g:1:206: T74
+ self.mT74()
+
+
+
+ elif alt35 == 51:
+ # C.g:1:210: T75
+ self.mT75()
+
+
+
+ elif alt35 == 52:
+ # C.g:1:214: T76
+ self.mT76()
+
+
+
+ elif alt35 == 53:
+ # C.g:1:218: T77
+ self.mT77()
+
+
+
+ elif alt35 == 54:
+ # C.g:1:222: T78
+ self.mT78()
+
+
+
+ elif alt35 == 55:
+ # C.g:1:226: T79
+ self.mT79()
+
+
+
+ elif alt35 == 56:
+ # C.g:1:230: T80
+ self.mT80()
+
+
+
+ elif alt35 == 57:
+ # C.g:1:234: T81
+ self.mT81()
+
+
+
+ elif alt35 == 58:
+ # C.g:1:238: T82
+ self.mT82()
+
+
+
+ elif alt35 == 59:
+ # C.g:1:242: T83
+ self.mT83()
+
+
+
+ elif alt35 == 60:
+ # C.g:1:246: T84
+ self.mT84()
+
+
+
+ elif alt35 == 61:
+ # C.g:1:250: T85
+ self.mT85()
+
+
+
+ elif alt35 == 62:
+ # C.g:1:254: T86
+ self.mT86()
+
+
+
+ elif alt35 == 63:
+ # C.g:1:258: T87
+ self.mT87()
+
+
+
+ elif alt35 == 64:
+ # C.g:1:262: T88
+ self.mT88()
+
+
+
+ elif alt35 == 65:
+ # C.g:1:266: T89
+ self.mT89()
+
+
+
+ elif alt35 == 66:
+ # C.g:1:270: T90
+ self.mT90()
+
+
+
+ elif alt35 == 67:
+ # C.g:1:274: T91
+ self.mT91()
+
+
+
+ elif alt35 == 68:
+ # C.g:1:278: T92
+ self.mT92()
+
+
+
+ elif alt35 == 69:
+ # C.g:1:282: T93
+ self.mT93()
+
+
+
+ elif alt35 == 70:
+ # C.g:1:286: T94
+ self.mT94()
+
+
+
+ elif alt35 == 71:
+ # C.g:1:290: T95
+ self.mT95()
+
+
+
+ elif alt35 == 72:
+ # C.g:1:294: T96
+ self.mT96()
+
+
+
+ elif alt35 == 73:
+ # C.g:1:298: T97
+ self.mT97()
+
+
+
+ elif alt35 == 74:
+ # C.g:1:302: T98
+ self.mT98()
+
+
+
+ elif alt35 == 75:
+ # C.g:1:306: T99
+ self.mT99()
+
+
+
+ elif alt35 == 76:
+ # C.g:1:310: T100
+ self.mT100()
+
+
+
+ elif alt35 == 77:
+ # C.g:1:315: T101
+ self.mT101()
+
+
+
+ elif alt35 == 78:
+ # C.g:1:320: T102
+ self.mT102()
+
+
+
+ elif alt35 == 79:
+ # C.g:1:325: T103
+ self.mT103()
+
+
+
+ elif alt35 == 80:
+ # C.g:1:330: T104
+ self.mT104()
+
+
+
+ elif alt35 == 81:
+ # C.g:1:335: T105
+ self.mT105()
+
+
+
+ elif alt35 == 82:
+ # C.g:1:340: T106
+ self.mT106()
+
+
+
+ elif alt35 == 83:
+ # C.g:1:345: T107
+ self.mT107()
+
+
+
+ elif alt35 == 84:
+ # C.g:1:350: T108
+ self.mT108()
+
+
+
+ elif alt35 == 85:
+ # C.g:1:355: T109
+ self.mT109()
+
+
+
+ elif alt35 == 86:
+ # C.g:1:360: T110
+ self.mT110()
+
+
+
+ elif alt35 == 87:
+ # C.g:1:365: T111
+ self.mT111()
+
+
+
+ elif alt35 == 88:
+ # C.g:1:370: T112
+ self.mT112()
+
+
+
+ elif alt35 == 89:
+ # C.g:1:375: T113
+ self.mT113()
+
+
+
+ elif alt35 == 90:
+ # C.g:1:380: T114
+ self.mT114()
+
+
+
+ elif alt35 == 91:
+ # C.g:1:385: T115
+ self.mT115()
+
+
+
+ elif alt35 == 92:
+ # C.g:1:390: T116
+ self.mT116()
+
+
+
+ elif alt35 == 93:
+ # C.g:1:395: T117
+ self.mT117()
+
+
+
+ elif alt35 == 94:
+ # C.g:1:400: IDENTIFIER
+ self.mIDENTIFIER()
+
+
+
+ elif alt35 == 95:
+ # C.g:1:411: CHARACTER_LITERAL
+ self.mCHARACTER_LITERAL()
+
+
+
+ elif alt35 == 96:
+ # C.g:1:429: STRING_LITERAL
+ self.mSTRING_LITERAL()
+
+
+
+ elif alt35 == 97:
+ # C.g:1:444: HEX_LITERAL
+ self.mHEX_LITERAL()
+
+
+
+ elif alt35 == 98:
+ # C.g:1:456: DECIMAL_LITERAL
+ self.mDECIMAL_LITERAL()
+
+
+
+ elif alt35 == 99:
+ # C.g:1:472: OCTAL_LITERAL
+ self.mOCTAL_LITERAL()
+
+
+
+ elif alt35 == 100:
+ # C.g:1:486: FLOATING_POINT_LITERAL
+ self.mFLOATING_POINT_LITERAL()
+
+
+
+ elif alt35 == 101:
+ # C.g:1:509: WS
+ self.mWS()
+
+
+
+ elif alt35 == 102:
+ # C.g:1:512: BS
+ self.mBS()
+
+
+
+ elif alt35 == 103:
+ # C.g:1:515: UnicodeVocabulary
+ self.mUnicodeVocabulary()
+
+
+
+ elif alt35 == 104:
+ # C.g:1:533: COMMENT
+ self.mCOMMENT()
+
+
+
+ elif alt35 == 105:
+ # C.g:1:541: LINE_COMMENT
+ self.mLINE_COMMENT()
+
+
+
+ elif alt35 == 106:
+ # C.g:1:554: LINE_COMMAND
+ self.mLINE_COMMAND()
+
+
+
+
+
+
+
+
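+    # NOTE (editorial): each DFA.unpack() string below is a run-length
+    # encoded table of 16-bit values that the antlr3 runtime expands from
+    # count/value pairs; u"\uffff" encodes -1, i.e. "no transition" or "not
+    # an accept state". The eot/eof/min/max/accept/special arrays and the
+    # transition rows drive the table-driven predict() calls used by
+    # FLOATING_POINT_LITERAL (DFA #25) and mTokens (DFA #35).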
+ # lookup tables for DFA #25
+
+ DFA25_eot = DFA.unpack(
+ u"\7\uffff\1\10\2\uffff"
+ )
+
+ DFA25_eof = DFA.unpack(
+ u"\12\uffff"
+ )
+
+ DFA25_min = DFA.unpack(
+ u"\2\56\2\uffff\1\53\1\uffff\2\60\2\uffff"
+ )
+
+ DFA25_max = DFA.unpack(
+ u"\1\71\1\146\2\uffff\1\71\1\uffff\1\71\1\146\2\uffff"
+ )
+
+ DFA25_accept = DFA.unpack(
+ u"\2\uffff\1\2\1\1\1\uffff\1\4\2\uffff\2\3"
+ )
+
+ DFA25_special = DFA.unpack(
+ u"\12\uffff"
+ )
+
+
+ DFA25_transition = [
+ DFA.unpack(u"\1\2\1\uffff\12\1"),
+ DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
+ u"\1\4\1\5"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\6\1\uffff\1\6\2\uffff\12\7"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\12\7"),
+ DFA.unpack(u"\12\7\12\uffff\1\11\1\uffff\1\11\35\uffff\1\11\1\uffff"
+ u"\1\11"),
+ DFA.unpack(u""),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #25
+
+ DFA25 = DFA
+ # lookup tables for DFA #35
+
+ DFA35_eot = DFA.unpack(
+ u"\2\uffff\1\76\1\uffff\1\101\14\76\3\uffff\10\76\4\uffff\1\151\1"
+ u"\153\1\157\1\163\1\167\1\171\1\174\1\uffff\1\177\1\u0082\1\u0085"
+ u"\1\u0087\1\u008a\1\uffff\5\76\1\uffff\2\73\2\u0095\2\uffff\1\73"
+ u"\2\uffff\1\76\4\uffff\16\76\1\u00ad\5\76\1\u00b4\1\76\3\uffff\1"
+ u"\u00b7\10\76\34\uffff\1\u00c1\2\uffff\1\u00c3\10\uffff\5\76\3\uffff"
+ u"\1\u00c9\1\uffff\1\u0095\3\uffff\23\76\1\uffff\1\u00de\1\76\1\u00e0"
+ u"\3\76\1\uffff\2\76\1\uffff\1\76\1\u00e7\6\76\4\uffff\5\76\1\uffff"
+ u"\1\76\1\u00f5\1\76\1\u00f7\6\76\1\u00fe\4\76\1\u0103\1\u0104\2"
+ u"\76\1\u0107\1\uffff\1\u0108\1\uffff\6\76\1\uffff\10\76\1\u0118"
+ u"\1\76\1\u011a\2\76\1\uffff\1\76\1\uffff\5\76\1\u0123\1\uffff\4"
+ u"\76\2\uffff\1\76\1\u0129\2\uffff\1\u012a\3\76\1\u012e\1\76\1\u0130"
+ u"\7\76\1\u0139\1\uffff\1\u013a\1\uffff\1\u013b\1\76\1\u013d\1\u013e"
+ u"\1\u013f\1\u0140\1\u0141\1\u0142\1\uffff\1\76\1\u0144\1\u0145\2"
+ u"\76\2\uffff\1\76\1\u0149\1\76\1\uffff\1\76\1\uffff\5\76\1\u0151"
+ u"\1\u0152\1\76\3\uffff\1\u0154\6\uffff\1\76\2\uffff\2\76\1\u0158"
+ u"\1\uffff\7\76\2\uffff\1\u0160\1\uffff\1\u0161\1\u0162\1\u0163\1"
+ u"\uffff\1\u0164\1\u0165\1\76\1\u0167\3\76\6\uffff\1\u016b\1\uffff"
+ u"\3\76\1\uffff\21\76\1\u0180\2\76\1\uffff\3\76\1\u0186\1\76\1\uffff"
+ u"\11\76\1\u0191\1\uffff"
+ )
+
+ DFA35_eof = DFA.unpack(
+ u"\u0192\uffff"
+ )
+
+ DFA35_min = DFA.unpack(
+ u"\1\3\1\uffff\1\171\1\uffff\1\75\1\154\1\150\1\165\1\145\1\124\1"
+ u"\157\1\141\1\146\1\157\1\154\1\145\1\156\3\uffff\1\116\1\120\1"
+ u"\117\1\116\1\117\1\114\1\106\1\101\4\uffff\1\75\1\56\1\53\1\55"
+ u"\1\52\1\75\1\46\1\uffff\1\75\1\74\3\75\1\uffff\1\137\1\150\1\157"
+ u"\1\162\1\42\1\uffff\2\0\2\56\2\uffff\1\0\2\uffff\1\160\4\uffff"
+ u"\1\163\1\164\1\165\1\151\1\141\1\147\1\157\1\164\1\147\1\101\1"
+ u"\151\1\163\1\156\1\141\1\44\1\164\1\156\1\162\1\157\1\146\1\44"
+ u"\1\151\3\uffff\1\44\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34"
+ u"\uffff\1\75\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145"
+ u"\3\uffff\1\56\1\uffff\1\56\3\uffff\3\145\1\155\2\164\1\165\1\145"
+ u"\1\156\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\163\1"
+ u"\162\1\uffff\1\44\1\147\1\44\2\141\1\142\1\uffff\1\151\1\157\1"
+ u"\uffff\1\111\1\44\1\123\1\114\1\101\1\102\1\101\1\113\4\uffff\1"
+ u"\163\1\155\1\154\1\157\1\141\1\uffff\1\144\1\44\1\162\1\44\1\143"
+ u"\1\151\1\143\1\157\1\145\1\164\1\44\1\163\1\162\1\111\1\164\2\44"
+ u"\1\151\1\164\1\44\1\uffff\1\44\1\uffff\1\164\1\165\1\154\1\147"
+ u"\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\102\1\120\1\105"
+ u"\1\155\1\44\1\145\1\44\1\153\1\145\1\uffff\1\156\1\uffff\1\150"
+ u"\1\143\1\164\1\146\1\144\1\44\1\uffff\1\164\1\156\1\103\1\151\2"
+ u"\uffff\1\156\1\44\2\uffff\1\44\1\154\1\145\1\156\1\44\1\116\1\44"
+ u"\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\44\1\uffff\1\44\1"
+ u"\uffff\1\44\1\146\6\44\1\uffff\1\145\2\44\1\154\1\165\2\uffff\1"
+ u"\164\1\44\1\145\1\uffff\1\101\1\uffff\1\116\1\114\1\137\1\116\1"
+ u"\117\2\44\1\137\3\uffff\1\44\6\uffff\1\162\2\uffff\2\145\1\44\1"
+ u"\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff\1\44\1\uffff\3\44"
+ u"\1\uffff\2\44\1\104\1\44\1\105\1\111\1\123\6\uffff\1\44\1\uffff"
+ u"\2\115\1\105\1\uffff\1\117\1\105\1\122\1\126\1\123\1\126\2\105"
+ u"\1\111\1\137\1\122\1\103\1\111\1\126\1\105\1\106\1\111\1\44\1\137"
+ u"\1\103\1\uffff\1\125\1\105\1\116\1\44\1\122\1\uffff\1\105\1\106"
+ u"\1\105\1\122\1\105\1\116\1\103\1\105\1\104\1\44\1\uffff"
+ )
+
+ DFA35_max = DFA.unpack(
+ u"\1\ufffe\1\uffff\1\171\1\uffff\1\75\1\170\1\167\1\165\1\145\1\124"
+ u"\2\157\1\156\3\157\1\156\3\uffff\1\116\1\125\1\117\1\116\1\117"
+ u"\1\114\1\106\1\101\4\uffff\1\75\1\71\1\75\1\76\3\75\1\uffff\2\75"
+ u"\1\76\1\75\1\174\1\uffff\1\141\1\150\1\157\1\162\1\47\1\uffff\2"
+ u"\ufffe\1\170\1\146\2\uffff\1\ufffe\2\uffff\1\160\4\uffff\1\163"
+ u"\1\164\1\165\1\151\1\162\1\172\1\157\2\164\1\101\1\154\1\163\1"
+ u"\156\1\141\1\172\1\164\1\156\1\162\1\157\1\146\1\172\1\163\3\uffff"
+ u"\1\172\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34\uffff\1\75"
+ u"\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145\3\uffff\1"
+ u"\146\1\uffff\1\146\3\uffff\3\145\1\155\2\164\1\165\1\145\1\156"
+ u"\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\164\1\162\1"
+ u"\uffff\1\172\1\147\1\172\2\141\1\142\1\uffff\1\151\1\157\1\uffff"
+ u"\1\111\1\172\1\123\1\114\1\101\1\102\1\137\1\113\4\uffff\1\163"
+ u"\1\155\1\154\1\157\1\141\1\uffff\1\144\1\172\1\162\1\172\1\143"
+ u"\1\151\1\143\1\157\1\145\1\164\1\172\1\163\1\162\1\111\1\164\2"
+ u"\172\1\151\1\164\1\172\1\uffff\1\172\1\uffff\1\164\1\165\1\154"
+ u"\1\147\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\122\1\120"
+ u"\1\105\1\155\1\172\1\145\1\172\1\153\1\145\1\uffff\1\156\1\uffff"
+ u"\1\150\1\143\1\164\1\146\1\144\1\172\1\uffff\1\164\1\156\1\103"
+ u"\1\151\2\uffff\1\156\1\172\2\uffff\1\172\1\154\1\145\1\156\1\172"
+ u"\1\116\1\172\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\172\1"
+ u"\uffff\1\172\1\uffff\1\172\1\146\6\172\1\uffff\1\145\2\172\1\154"
+ u"\1\165\2\uffff\1\164\1\172\1\145\1\uffff\1\101\1\uffff\1\116\1"
+ u"\114\1\137\1\116\1\117\2\172\1\137\3\uffff\1\172\6\uffff\1\162"
+ u"\2\uffff\2\145\1\172\1\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff"
+ u"\1\172\1\uffff\3\172\1\uffff\2\172\1\104\1\172\1\105\1\111\1\123"
+ u"\6\uffff\1\172\1\uffff\2\115\1\105\1\uffff\1\117\1\105\1\122\1"
+ u"\126\1\123\1\126\2\105\1\111\1\137\1\122\1\103\1\111\1\126\1\105"
+ u"\1\106\1\111\1\172\1\137\1\103\1\uffff\1\125\1\105\1\116\1\172"
+ u"\1\122\1\uffff\1\105\1\106\1\105\1\122\1\105\1\116\1\103\1\105"
+ u"\1\104\1\172\1\uffff"
+ )
+
+ DFA35_accept = DFA.unpack(
+ u"\1\uffff\1\1\1\uffff\1\3\15\uffff\1\23\1\24\1\27\10\uffff\1\46"
+ u"\1\47\1\50\1\51\7\uffff\1\66\5\uffff\1\102\5\uffff\1\136\4\uffff"
+ u"\1\145\1\146\1\uffff\1\147\1\1\1\uffff\1\136\1\3\1\107\1\4\26\uffff"
+ u"\1\23\1\24\1\27\11\uffff\1\46\1\47\1\50\1\51\1\70\1\52\1\53\1\63"
+ u"\1\144\1\73\1\60\1\54\1\74\1\64\1\61\1\55\1\150\1\151\1\71\1\56"
+ u"\1\72\1\57\1\77\1\104\1\65\1\66\1\110\1\67\1\uffff\1\113\1\111"
+ u"\1\uffff\1\114\1\112\1\100\1\106\1\103\1\101\1\105\1\102\5\uffff"
+ u"\1\140\1\137\1\141\1\uffff\1\142\1\uffff\1\145\1\146\1\152\23\uffff"
+ u"\1\124\6\uffff\1\130\2\uffff\1\33\10\uffff\1\75\1\115\1\76\1\116"
+ u"\5\uffff\1\143\24\uffff\1\15\1\uffff\1\131\6\uffff\1\34\15\uffff"
+ u"\1\125\1\uffff\1\30\6\uffff\1\7\4\uffff\1\12\1\122\2\uffff\1\13"
+ u"\1\16\17\uffff\1\120\1\uffff\1\132\10\uffff\1\14\5\uffff\1\31\1"
+ u"\17\3\uffff\1\26\1\uffff\1\36\10\uffff\1\121\1\127\1\134\1\uffff"
+ u"\1\5\1\126\1\6\1\25\1\62\1\21\1\uffff\1\135\1\11\3\uffff\1\20\7"
+ u"\uffff\1\42\1\45\1\uffff\1\2\3\uffff\1\123\7\uffff\1\117\1\10\1"
+ u"\32\1\133\1\22\1\35\1\uffff\1\40\3\uffff\1\37\24\uffff\1\43\5\uffff"
+ u"\1\44\12\uffff\1\41"
+ )
+
+ DFA35_special = DFA.unpack(
+ u"\u0192\uffff"
+ )
+
+
+ DFA35_transition = [
+ DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
+ u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
+ u"\67\1\23\1\1\1\51\1\4\1\52\1\55\1\73\2\63\1\26\1\63\1\32\1\63\1"
+ u"\31\1\63\1\24\2\63\1\62\2\63\1\25\1\33\2\63\1\11\1\63\1\27\1\30"
+ u"\4\63\1\36\1\71\1\37\1\53\1\56\1\73\1\7\1\61\1\13\1\17\1\5\1\16"
+ u"\1\60\1\63\1\14\2\63\1\15\5\63\1\10\1\6\1\2\1\20\1\12\1\57\3\63"
+ u"\1\21\1\54\1\22\1\47\uff80\73"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\75"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\100"),
+ DFA.unpack(u"\1\102\1\uffff\1\104\11\uffff\1\103"),
+ DFA.unpack(u"\1\110\1\107\12\uffff\1\106\2\uffff\1\105"),
+ DFA.unpack(u"\1\111"),
+ DFA.unpack(u"\1\112"),
+ DFA.unpack(u"\1\113"),
+ DFA.unpack(u"\1\114"),
+ DFA.unpack(u"\1\115\6\uffff\1\117\6\uffff\1\116"),
+ DFA.unpack(u"\1\120\7\uffff\1\121"),
+ DFA.unpack(u"\1\122"),
+ DFA.unpack(u"\1\124\2\uffff\1\123"),
+ DFA.unpack(u"\1\125\11\uffff\1\126"),
+ DFA.unpack(u"\1\127"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\133"),
+ DFA.unpack(u"\1\134\4\uffff\1\135"),
+ DFA.unpack(u"\1\136"),
+ DFA.unpack(u"\1\137"),
+ DFA.unpack(u"\1\140"),
+ DFA.unpack(u"\1\141"),
+ DFA.unpack(u"\1\142"),
+ DFA.unpack(u"\1\143"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\150"),
+ DFA.unpack(u"\1\152\1\uffff\12\154"),
+ DFA.unpack(u"\1\156\21\uffff\1\155"),
+ DFA.unpack(u"\1\162\17\uffff\1\160\1\161"),
+ DFA.unpack(u"\1\164\4\uffff\1\165\15\uffff\1\166"),
+ DFA.unpack(u"\1\170"),
+ DFA.unpack(u"\1\173\26\uffff\1\172"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\176"),
+ DFA.unpack(u"\1\u0080\1\u0081"),
+ DFA.unpack(u"\1\u0084\1\u0083"),
+ DFA.unpack(u"\1\u0086"),
+ DFA.unpack(u"\1\u0089\76\uffff\1\u0088"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u008c\1\uffff\1\u008d"),
+ DFA.unpack(u"\1\u008e"),
+ DFA.unpack(u"\1\u008f"),
+ DFA.unpack(u"\1\u0090"),
+ DFA.unpack(u"\1\u0091\4\uffff\1\u0092"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\47\u0092\1\uffff\uffd7\u0092"),
+ DFA.unpack(u"\uffff\u0091"),
+ DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\21\uffff"
+ u"\1\u0093\13\uffff\3\154\21\uffff\1\u0093"),
+ DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\uffff\u0099"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u009a"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u009b"),
+ DFA.unpack(u"\1\u009c"),
+ DFA.unpack(u"\1\u009d"),
+ DFA.unpack(u"\1\u009e"),
+ DFA.unpack(u"\1\u009f\20\uffff\1\u00a0"),
+ DFA.unpack(u"\1\u00a2\22\uffff\1\u00a1"),
+ DFA.unpack(u"\1\u00a3"),
+ DFA.unpack(u"\1\u00a4"),
+ DFA.unpack(u"\1\u00a5\14\uffff\1\u00a6"),
+ DFA.unpack(u"\1\u00a7"),
+ DFA.unpack(u"\1\u00a9\2\uffff\1\u00a8"),
+ DFA.unpack(u"\1\u00aa"),
+ DFA.unpack(u"\1\u00ab"),
+ DFA.unpack(u"\1\u00ac"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00ae"),
+ DFA.unpack(u"\1\u00af"),
+ DFA.unpack(u"\1\u00b0"),
+ DFA.unpack(u"\1\u00b1"),
+ DFA.unpack(u"\1\u00b2"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\24\76\1\u00b3\5\76"),
+ DFA.unpack(u"\1\u00b6\11\uffff\1\u00b5"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00b8"),
+ DFA.unpack(u"\1\u00b9"),
+ DFA.unpack(u"\1\u00ba"),
+ DFA.unpack(u"\1\u00bb"),
+ DFA.unpack(u"\1\u00bc"),
+ DFA.unpack(u"\1\u00bd"),
+ DFA.unpack(u"\1\u00be"),
+ DFA.unpack(u"\1\u00bf"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00c0"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00c2"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00c4"),
+ DFA.unpack(u"\1\u00c5"),
+ DFA.unpack(u"\1\u00c6"),
+ DFA.unpack(u"\1\u00c7"),
+ DFA.unpack(u"\1\u00c8"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\35\uffff"
+ u"\3\154"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00ca"),
+ DFA.unpack(u"\1\u00cb"),
+ DFA.unpack(u"\1\u00cc"),
+ DFA.unpack(u"\1\u00cd"),
+ DFA.unpack(u"\1\u00ce"),
+ DFA.unpack(u"\1\u00cf"),
+ DFA.unpack(u"\1\u00d0"),
+ DFA.unpack(u"\1\u00d1"),
+ DFA.unpack(u"\1\u00d2"),
+ DFA.unpack(u"\1\u00d3"),
+ DFA.unpack(u"\1\u00d4"),
+ DFA.unpack(u"\1\u00d5"),
+ DFA.unpack(u"\1\u00d6"),
+ DFA.unpack(u"\1\u00d7"),
+ DFA.unpack(u"\1\u00d8"),
+ DFA.unpack(u"\1\u00d9"),
+ DFA.unpack(u"\1\u00da"),
+ DFA.unpack(u"\1\u00dc\1\u00db"),
+ DFA.unpack(u"\1\u00dd"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00df"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00e1"),
+ DFA.unpack(u"\1\u00e2"),
+ DFA.unpack(u"\1\u00e3"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00e4"),
+ DFA.unpack(u"\1\u00e5"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00e6"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00e8"),
+ DFA.unpack(u"\1\u00e9"),
+ DFA.unpack(u"\1\u00ea"),
+ DFA.unpack(u"\1\u00eb"),
+ DFA.unpack(u"\1\u00ed\35\uffff\1\u00ec"),
+ DFA.unpack(u"\1\u00ee"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00ef"),
+ DFA.unpack(u"\1\u00f0"),
+ DFA.unpack(u"\1\u00f1"),
+ DFA.unpack(u"\1\u00f2"),
+ DFA.unpack(u"\1\u00f3"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00f4"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00f6"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00f8"),
+ DFA.unpack(u"\1\u00f9"),
+ DFA.unpack(u"\1\u00fa"),
+ DFA.unpack(u"\1\u00fb"),
+ DFA.unpack(u"\1\u00fc"),
+ DFA.unpack(u"\1\u00fd"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u00ff"),
+ DFA.unpack(u"\1\u0100"),
+ DFA.unpack(u"\1\u0101"),
+ DFA.unpack(u"\1\u0102"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0105"),
+ DFA.unpack(u"\1\u0106"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0109"),
+ DFA.unpack(u"\1\u010a"),
+ DFA.unpack(u"\1\u010b"),
+ DFA.unpack(u"\1\u010c"),
+ DFA.unpack(u"\1\u010d"),
+ DFA.unpack(u"\1\u010e"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u010f"),
+ DFA.unpack(u"\1\u0110"),
+ DFA.unpack(u"\1\u0111"),
+ DFA.unpack(u"\1\u0112"),
+ DFA.unpack(u"\1\u0114\17\uffff\1\u0113"),
+ DFA.unpack(u"\1\u0115"),
+ DFA.unpack(u"\1\u0116"),
+ DFA.unpack(u"\1\u0117"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0119"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u011b"),
+ DFA.unpack(u"\1\u011c"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u011d"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u011e"),
+ DFA.unpack(u"\1\u011f"),
+ DFA.unpack(u"\1\u0120"),
+ DFA.unpack(u"\1\u0121"),
+ DFA.unpack(u"\1\u0122"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0124"),
+ DFA.unpack(u"\1\u0125"),
+ DFA.unpack(u"\1\u0126"),
+ DFA.unpack(u"\1\u0127"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0128"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u012b"),
+ DFA.unpack(u"\1\u012c"),
+ DFA.unpack(u"\1\u012d"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u012f"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0131"),
+ DFA.unpack(u"\1\u0132"),
+ DFA.unpack(u"\1\u0133"),
+ DFA.unpack(u"\1\u0134"),
+ DFA.unpack(u"\1\u0135"),
+ DFA.unpack(u"\1\u0136"),
+ DFA.unpack(u"\1\u0137"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\u0138\1"
+ u"\uffff\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u013c"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0143"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0146"),
+ DFA.unpack(u"\1\u0147"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0148"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u014a"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u014b"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u014c"),
+ DFA.unpack(u"\1\u014d"),
+ DFA.unpack(u"\1\u014e"),
+ DFA.unpack(u"\1\u014f"),
+ DFA.unpack(u"\1\u0150"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0153"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0155"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0156"),
+ DFA.unpack(u"\1\u0157"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0159"),
+ DFA.unpack(u"\1\u015a"),
+ DFA.unpack(u"\1\u015b"),
+ DFA.unpack(u"\1\u015c"),
+ DFA.unpack(u"\1\u015d"),
+ DFA.unpack(u"\1\u015e"),
+ DFA.unpack(u"\1\u015f"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0166"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0168"),
+ DFA.unpack(u"\1\u0169"),
+ DFA.unpack(u"\1\u016a"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u016c"),
+ DFA.unpack(u"\1\u016d"),
+ DFA.unpack(u"\1\u016e"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u016f"),
+ DFA.unpack(u"\1\u0170"),
+ DFA.unpack(u"\1\u0171"),
+ DFA.unpack(u"\1\u0172"),
+ DFA.unpack(u"\1\u0173"),
+ DFA.unpack(u"\1\u0174"),
+ DFA.unpack(u"\1\u0175"),
+ DFA.unpack(u"\1\u0176"),
+ DFA.unpack(u"\1\u0177"),
+ DFA.unpack(u"\1\u0178"),
+ DFA.unpack(u"\1\u0179"),
+ DFA.unpack(u"\1\u017a"),
+ DFA.unpack(u"\1\u017b"),
+ DFA.unpack(u"\1\u017c"),
+ DFA.unpack(u"\1\u017d"),
+ DFA.unpack(u"\1\u017e"),
+ DFA.unpack(u"\1\u017f"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0181"),
+ DFA.unpack(u"\1\u0182"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0183"),
+ DFA.unpack(u"\1\u0184"),
+ DFA.unpack(u"\1\u0185"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"\1\u0187"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0188"),
+ DFA.unpack(u"\1\u0189"),
+ DFA.unpack(u"\1\u018a"),
+ DFA.unpack(u"\1\u018b"),
+ DFA.unpack(u"\1\u018c"),
+ DFA.unpack(u"\1\u018d"),
+ DFA.unpack(u"\1\u018e"),
+ DFA.unpack(u"\1\u018f"),
+ DFA.unpack(u"\1\u0190"),
+ DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
+ u"\32\76"),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #35
+
+ DFA35 = DFA
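+
+ # The packed strings above use the antlr3 runtime's run-length encoding:
+ # each two-character pair is (repeat count, transition value), with
+ # \uffff standing in for -1, i.e. "no transition". A minimal decoding
+ # sketch, assuming that scheme (illustrative note, not generated output):
+ #
+ #     def unpack(packed):
+ #         out = []
+ #         for i in range(0, len(packed) - 1, 2):
+ #             n, v = ord(packed[i]), ord(packed[i + 1])
+ #             out.extend([-1 if v == 0xFFFF else v] * n)
+ #         return out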
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CParser.py
new file mode 100755
index 00000000..9c718841
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/CParser.py
@@ -0,0 +1,18833 @@
+# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
+
+from __future__ import print_function
+from __future__ import absolute_import
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+from . import CodeFragment
+from . import FileProfile
+
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+BS=20
+LINE_COMMENT=23
+FloatTypeSuffix=16
+IntegerTypeSuffix=14
+LETTER=11
+OCTAL_LITERAL=6
+CHARACTER_LITERAL=8
+Exponent=15
+EOF=-1
+HexDigit=13
+STRING_LITERAL=9
+WS=19
+FLOATING_POINT_LITERAL=10
+IDENTIFIER=4
+UnicodeEscape=18
+LINE_COMMAND=24
+UnicodeVocabulary=21
+HEX_LITERAL=5
+COMMENT=22
+DECIMAL_LITERAL=7
+EscapeSequence=12
+OctalEscape=17
+
+# token names
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
+ "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
+ "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
+ "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
+ "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
+ "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
+ "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
+ "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
+ "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
+ "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
+ "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
+ "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
+ "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
+ "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
+ "'goto'", "'continue'", "'break'", "'return'"
+]
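+
+# tokenNames is indexed by the numeric token types defined above; for
+# example (illustrative, not generated output) tokenNames[IDENTIFIER] is
+# "IDENTIFIER" and tokenNames[HEX_LITERAL] is "HEX_LITERAL", while the
+# quoted literals ("';'", "'typedef'", ...) occupy types 25 and upward,
+# matching the bare integers passed to self.match() below.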
+
+
+class function_definition_scope(object):
+ def __init__(self):
+ self.ModifierText = None
+ self.DeclText = None
+ self.LBLine = None
+ self.LBOffset = None
+ self.DeclLine = None
+ self.DeclOffset = None
+class postfix_expression_scope(object):
+ def __init__(self):
+ self.FuncCallText = None
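+
+# These classes back the grammar's dynamic scopes: CParser.__init__
+# creates one stack per scope, an instance is pushed when the owning rule
+# is entered and popped in its finally clause, so recursive invocations
+# keep private copies of these fields.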
+
+
+class CParser(Parser):
+ grammarFileName = "C.g"
+ tokenNames = tokenNames
+
+ def __init__(self, input):
+ Parser.__init__(self, input)
+ self.ruleMemo = {}
+
+ self.function_definition_stack = []
+ self.postfix_expression_stack = []
+
+ def printTokenInfo(self, line, offset, tokenText):
+ print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
+
+ def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+ def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+ def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+ def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+ def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+ def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+ def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
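+
+ # The Store* callbacks above append CodeFragment records to the
+ # module-level lists in FileProfile as rules complete, so a caller
+ # parses first and harvests afterwards. A hedged usage sketch (stream
+ # class names assumed from the antlr3 runtime, CLexer from the sibling
+ # module; neither appears in this file):
+ #
+ #     lexer = CLexer(antlr3.ANTLRFileStream(path))
+ #     parser = CParser(antlr3.CommonTokenStream(lexer))
+ #     parser.translation_unit()
+ #     for FuncDef in FileProfile.FunctionDefinitionList:
+ #         ...  # inspect each recorded fragment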
+
+
+
+
+ # $ANTLR start translation_unit
+ # C.g:102:1: translation_unit : ( external_declaration )* ;
+ def translation_unit(self, ):
+
+ translation_unit_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
+ return
+
+ # C.g:103:2: ( ( external_declaration )* )
+ # C.g:103:4: ( external_declaration )*
+ # C.g:103:4: ( external_declaration )*
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == IDENTIFIER or LA1_0 == 26 or (29 <= LA1_0 <= 42) or (45 <= LA1_0 <= 46) or (48 <= LA1_0 <= 62) or LA1_0 == 66) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # C.g:0:0: external_declaration
+ self.following.append(self.FOLLOW_external_declaration_in_translation_unit74)
+ self.external_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 1, translation_unit_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end translation_unit
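+
+ # Every generated rule shares the skeleton above: under backtracking it
+ # first consults alreadyParsedRule() and, in its finally clause, calls
+ # memoize() with its rule number and start index, so repeated
+ # syntactic-predicate attempts over the same input region are answered
+ # from the cache instead of being re-parsed (ANTLR 3's packrat-style
+ # memoization).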
+
+
+ # $ANTLR start external_declaration
+ # C.g:114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );
+ def external_declaration(self, ):
+
+ external_declaration_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2):
+ return
+
+ # C.g:119:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? )
+ alt3 = 3
+ LA3_0 = self.input.LA(1)
+
+ if ((29 <= LA3_0 <= 33)) :
+ LA3_1 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 34) :
+ LA3_2 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 35) :
+ LA3_3 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 36) :
+ LA3_4 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 4, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 37) :
+ LA3_5 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 38) :
+ LA3_6 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 39) :
+ LA3_7 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 40) :
+ LA3_8 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 8, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 41) :
+ LA3_9 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 42) :
+ LA3_10 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input)
+
+ raise nvae
+
+ elif ((45 <= LA3_0 <= 46)) :
+ LA3_11 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 48) :
+ LA3_12 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 12, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == IDENTIFIER) :
+ LA3_13 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ elif (True) :
+ alt3 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 58) :
+ LA3_14 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 66) and (self.synpred4()):
+ alt3 = 1
+ elif (LA3_0 == 59) :
+ LA3_16 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 60) :
+ LA3_17 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 17, self.input)
+
+ raise nvae
+
+ elif ((49 <= LA3_0 <= 57) or LA3_0 == 61) :
+ LA3_18 = self.input.LA(2)
+
+ if (self.synpred4()) :
+ alt3 = 1
+ elif (self.synpred5()) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input)
+
+ raise nvae
+
+ elif (LA3_0 == 62) and (self.synpred4()):
+ alt3 = 1
+ elif (LA3_0 == 26) :
+ alt3 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input)
+
+ raise nvae
+
+ if alt3 == 1:
+ # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition
+ self.following.append(self.FOLLOW_function_definition_in_external_declaration113)
+ self.function_definition()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt3 == 2:
+ # C.g:120:4: declaration
+ self.following.append(self.FOLLOW_declaration_in_external_declaration118)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt3 == 3:
+ # C.g:121:4: macro_statement ( ';' )?
+ self.following.append(self.FOLLOW_macro_statement_in_external_declaration123)
+ self.macro_statement()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:121:20: ( ';' )?
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == 25) :
+ alt2 = 1
+ if alt2 == 1:
+ # C.g:121:21: ';'
+ self.match(self.input, 25, self.FOLLOW_25_in_external_declaration126)
+ if self.failed:
+ return
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 2, external_declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end external_declaration
+
+ class function_definition_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start function_definition
+ # C.g:126:1: function_definition : (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) ;
+ def function_definition(self, ):
+ self.function_definition_stack.append(function_definition_scope())
+ retval = self.function_definition_return()
+ retval.start = self.input.LT(1)
+ function_definition_StartIndex = self.input.index()
+ d = None
+
+ a = None
+
+ b = None
+
+ declarator1 = None
+
+
+
+ self.function_definition_stack[-1].ModifierText = ''
+ self.function_definition_stack[-1].DeclText = ''
+ self.function_definition_stack[-1].LBLine = 0
+ self.function_definition_stack[-1].LBOffset = 0
+ self.function_definition_stack[-1].DeclLine = 0
+ self.function_definition_stack[-1].DeclOffset = 0
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 3):
+ return retval
+
+ # C.g:146:2: ( (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) )
+ # C.g:146:4: (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement )
+ # C.g:146:5: (d= declaration_specifiers )?
+ alt4 = 2
+ LA4 = self.input.LA(1)
+ if LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33 or LA4 == 34 or LA4 == 35 or LA4 == 36 or LA4 == 37 or LA4 == 38 or LA4 == 39 or LA4 == 40 or LA4 == 41 or LA4 == 42 or LA4 == 45 or LA4 == 46 or LA4 == 48 or LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
+ alt4 = 1
+ elif LA4 == IDENTIFIER:
+ LA4 = self.input.LA(2)
+ if LA4 == 66:
+ alt4 = 1
+ elif LA4 == 58:
+ LA4_21 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 59:
+ LA4_22 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 60:
+ LA4_23 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == IDENTIFIER:
+ LA4_24 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 62:
+ LA4_25 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33:
+ LA4_26 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 34:
+ LA4_27 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 35:
+ LA4_28 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 36:
+ LA4_29 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 37:
+ LA4_30 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 38:
+ LA4_31 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 39:
+ LA4_32 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 40:
+ LA4_33 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 41:
+ LA4_34 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 42:
+ LA4_35 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 45 or LA4 == 46:
+ LA4_36 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 48:
+ LA4_37 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
+ LA4_38 = self.input.LA(3)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 58:
+ LA4_14 = self.input.LA(2)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 59:
+ LA4_16 = self.input.LA(2)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ elif LA4 == 60:
+ LA4_17 = self.input.LA(2)
+
+ if (self.synpred7()) :
+ alt4 = 1
+ if alt4 == 1:
+ # C.g:0:0: d= declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_function_definition157)
+ d = self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ self.following.append(self.FOLLOW_declarator_in_function_definition160)
+ declarator1 = self.declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if (LA6_0 == IDENTIFIER or LA6_0 == 26 or (29 <= LA6_0 <= 42) or (45 <= LA6_0 <= 46) or (48 <= LA6_0 <= 61)) :
+ alt6 = 1
+ elif (LA6_0 == 43) :
+ alt6 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )", 6, 0, self.input)
+
+ raise nvae
+
+ if alt6 == 1:
+ # C.g:147:5: ( declaration )+ a= compound_statement
+ # C.g:147:5: ( declaration )+
+ cnt5 = 0
+ while True: #loop5
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == IDENTIFIER or LA5_0 == 26 or (29 <= LA5_0 <= 42) or (45 <= LA5_0 <= 46) or (48 <= LA5_0 <= 61)) :
+ alt5 = 1
+
+
+ if alt5 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_function_definition166)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ if cnt5 >= 1:
+ break #loop5
+
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ eee = EarlyExitException(5, self.input)
+ raise eee
+
+ cnt5 += 1
+
+
+ self.following.append(self.FOLLOW_compound_statement_in_function_definition171)
+ a = self.compound_statement()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt6 == 2:
+ # C.g:148:5: b= compound_statement
+ self.following.append(self.FOLLOW_compound_statement_in_function_definition180)
+ b = self.compound_statement()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ if self.backtracking == 0:
+
+ if d is not None:
+ self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
+ else:
+ self.function_definition_stack[-1].ModifierText = ''
+ self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start, declarator1.stop)
+ self.function_definition_stack[-1].DeclLine = declarator1.start.line
+ self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
+ if a is not None:
+ self.function_definition_stack[-1].LBLine = a.start.line
+ self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
+ else:
+ self.function_definition_stack[-1].LBLine = b.start.line
+ self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+ if self.backtracking == 0:
+
+ self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 3, function_definition_StartIndex)
+
+ self.function_definition_stack.pop()
+ pass
+
+ return retval
+
+ # $ANTLR end function_definition
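+
+ # Note how the action above distinguishes the two alternatives of the
+ # rule: `a` is bound only when declarations precede the compound
+ # statement, so the left-brace line/offset is read from whichever
+ # compound_statement actually matched (`a` or `b`) before
+ # StoreFunctionDefinition fires on the rule's stop token.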
+
+
+ # $ANTLR start declaration
+ # C.g:166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );
+ def declaration(self, ):
+
+ declaration_StartIndex = self.input.index()
+ a = None
+ d = None
+ e = None
+ b = None
+
+ c = None
+
+ s = None
+
+ t = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4):
+ return
+
+ # C.g:167:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' )
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == 26) :
+ alt9 = 1
+ elif (LA9_0 == IDENTIFIER or (29 <= LA9_0 <= 42) or (45 <= LA9_0 <= 46) or (48 <= LA9_0 <= 61)) :
+ alt9 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input)
+
+ raise nvae
+
+ if alt9 == 1:
+ # C.g:167:4: a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';'
+ a = self.input.LT(1)
+ self.match(self.input, 26, self.FOLLOW_26_in_declaration203)
+ if self.failed:
+ return
+ # C.g:167:17: (b= declaration_specifiers )?
+ alt7 = 2
+ LA7 = self.input.LA(1)
+ if LA7 == 29 or LA7 == 30 or LA7 == 31 or LA7 == 32 or LA7 == 33 or LA7 == 34 or LA7 == 35 or LA7 == 36 or LA7 == 37 or LA7 == 38 or LA7 == 39 or LA7 == 40 or LA7 == 41 or LA7 == 42 or LA7 == 45 or LA7 == 46 or LA7 == 48 or LA7 == 49 or LA7 == 50 or LA7 == 51 or LA7 == 52 or LA7 == 53 or LA7 == 54 or LA7 == 55 or LA7 == 56 or LA7 == 57 or LA7 == 61:
+ alt7 = 1
+ elif LA7 == IDENTIFIER:
+ LA7_13 = self.input.LA(2)
+
+ if (LA7_13 == 62) :
+ LA7_21 = self.input.LA(3)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ elif (LA7_13 == IDENTIFIER or (29 <= LA7_13 <= 42) or (45 <= LA7_13 <= 46) or (48 <= LA7_13 <= 61) or LA7_13 == 66) :
+ alt7 = 1
+ elif LA7 == 58:
+ LA7_14 = self.input.LA(2)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ elif LA7 == 59:
+ LA7_16 = self.input.LA(2)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ elif LA7 == 60:
+ LA7_17 = self.input.LA(2)
+
+ if (self.synpred10()) :
+ alt7 = 1
+ if alt7 == 1:
+ # C.g:0:0: b= declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_declaration207)
+ b = self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_init_declarator_list_in_declaration216)
+ c = self.init_declarator_list()
+ self.following.pop()
+ if self.failed:
+ return
+ d = self.input.LT(1)
+ self.match(self.input, 25, self.FOLLOW_25_in_declaration220)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if b is not None:
+ self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
+ else:
+ self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
+
+
+
+
+ elif alt9 == 2:
+ # C.g:175:4: s= declaration_specifiers (t= init_declarator_list )? e= ';'
+ self.following.append(self.FOLLOW_declaration_specifiers_in_declaration234)
+ s = self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:175:30: (t= init_declarator_list )?
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if (LA8_0 == IDENTIFIER or (58 <= LA8_0 <= 60) or LA8_0 == 62 or LA8_0 == 66) :
+ alt8 = 1
+ if alt8 == 1:
+ # C.g:0:0: t= init_declarator_list
+ self.following.append(self.FOLLOW_init_declarator_list_in_declaration238)
+ t = self.init_declarator_list()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ e = self.input.LT(1)
+ self.match(self.input, 25, self.FOLLOW_25_in_declaration243)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if t is not None:
+ self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 4, declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end declaration
+
+ class declaration_specifiers_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
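+
+ # Generated *_return holders expose a rule's start/stop tokens so that
+ # embedded actions can recover the matched source text verbatim via
+ # self.input.toString(retval.start, retval.stop).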
+
+
+
+ # $ANTLR start declaration_specifiers
+ # C.g:182:1: declaration_specifiers : ( storage_class_specifier | type_specifier | type_qualifier )+ ;
+ def declaration_specifiers(self, ):
+
+ retval = self.declaration_specifiers_return()
+ retval.start = self.input.LT(1)
+ declaration_specifiers_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 5):
+ return retval
+
+ # C.g:183:2: ( ( storage_class_specifier | type_specifier | type_qualifier )+ )
+ # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
+ # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
+ cnt10 = 0
+ while True: #loop10
+ alt10 = 4
+ LA10 = self.input.LA(1)
+ if LA10 == 58:
+ LA10_2 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == 59:
+ LA10_3 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == 60:
+ LA10_4 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == IDENTIFIER:
+ LA10_5 = self.input.LA(2)
+
+ if (self.synpred14()) :
+ alt10 = 2
+
+
+ elif LA10 == 53:
+ LA10_9 = self.input.LA(2)
+
+ if (self.synpred15()) :
+ alt10 = 3
+
+
+ elif LA10 == 29 or LA10 == 30 or LA10 == 31 or LA10 == 32 or LA10 == 33:
+ alt10 = 1
+ elif LA10 == 34 or LA10 == 35 or LA10 == 36 or LA10 == 37 or LA10 == 38 or LA10 == 39 or LA10 == 40 or LA10 == 41 or LA10 == 42 or LA10 == 45 or LA10 == 46 or LA10 == 48:
+ alt10 = 2
+ elif LA10 == 49 or LA10 == 50 or LA10 == 51 or LA10 == 52 or LA10 == 54 or LA10 == 55 or LA10 == 56 or LA10 == 57 or LA10 == 61:
+ alt10 = 3
+
+ if alt10 == 1:
+ # C.g:183:10: storage_class_specifier
+ self.following.append(self.FOLLOW_storage_class_specifier_in_declaration_specifiers264)
+ self.storage_class_specifier()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt10 == 2:
+ # C.g:184:7: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_declaration_specifiers272)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt10 == 3:
+ # C.g:185:13: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_declaration_specifiers286)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ if cnt10 >= 1:
+ break #loop10
+
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ eee = EarlyExitException(10, self.input)
+ raise eee
+
+ cnt10 += 1
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 5, declaration_specifiers_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end declaration_specifiers
+
+ class init_declarator_list_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start init_declarator_list
+ # C.g:189:1: init_declarator_list : init_declarator ( ',' init_declarator )* ;
+ def init_declarator_list(self, ):
+
+ retval = self.init_declarator_list_return()
+ retval.start = self.input.LT(1)
+ init_declarator_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 6):
+ return retval
+
+ # C.g:190:2: ( init_declarator ( ',' init_declarator )* )
+ # C.g:190:4: init_declarator ( ',' init_declarator )*
+ self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list308)
+ self.init_declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:190:20: ( ',' init_declarator )*
+ while True: #loop11
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if (LA11_0 == 27) :
+ alt11 = 1
+
+
+ if alt11 == 1:
+ # C.g:190:21: ',' init_declarator
+ self.match(self.input, 27, self.FOLLOW_27_in_init_declarator_list311)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list313)
+ self.init_declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop11
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 6, init_declarator_list_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end init_declarator_list
+
+
+ # $ANTLR start init_declarator
+ # C.g:193:1: init_declarator : declarator ( '=' initializer )? ;
+ def init_declarator(self, ):
+
+ init_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7):
+ return
+
+ # C.g:194:2: ( declarator ( '=' initializer )? )
+ # C.g:194:4: declarator ( '=' initializer )?
+ self.following.append(self.FOLLOW_declarator_in_init_declarator326)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:194:15: ( '=' initializer )?
+ alt12 = 2
+ LA12_0 = self.input.LA(1)
+
+ if (LA12_0 == 28) :
+ alt12 = 1
+ if alt12 == 1:
+ # C.g:194:16: '=' initializer
+ self.match(self.input, 28, self.FOLLOW_28_in_init_declarator329)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_initializer_in_init_declarator331)
+ self.initializer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 7, init_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end init_declarator
+
+
+ # $ANTLR start storage_class_specifier
+ # C.g:197:1: storage_class_specifier : ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' );
+ def storage_class_specifier(self, ):
+
+ storage_class_specifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8):
+ return
+
+ # C.g:198:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' )
+ # C.g:
+ if (29 <= self.input.LA(1) <= 33):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_storage_class_specifier0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 8, storage_class_specifier_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end storage_class_specifier
+
+
+ # $ANTLR start type_specifier
+ # C.g:205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );
+ def type_specifier(self, ):
+
+ type_specifier_StartIndex = self.input.index()
+ s = None
+
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9):
+ return
+
+ # C.g:206:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id )
+ alt13 = 12
+ LA13_0 = self.input.LA(1)
+
+ if (LA13_0 == 34) :
+ alt13 = 1
+ elif (LA13_0 == 35) :
+ alt13 = 2
+ elif (LA13_0 == 36) :
+ alt13 = 3
+ elif (LA13_0 == 37) :
+ alt13 = 4
+ elif (LA13_0 == 38) :
+ alt13 = 5
+ elif (LA13_0 == 39) :
+ alt13 = 6
+ elif (LA13_0 == 40) :
+ alt13 = 7
+ elif (LA13_0 == 41) :
+ alt13 = 8
+ elif (LA13_0 == 42) :
+ alt13 = 9
+ elif ((45 <= LA13_0 <= 46)) :
+ alt13 = 10
+ elif (LA13_0 == 48) :
+ alt13 = 11
+ elif (LA13_0 == IDENTIFIER) and (self.synpred34()):
+ alt13 = 12
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input)
+
+ raise nvae
+
+ if alt13 == 1:
+ # C.g:206:4: 'void'
+ self.match(self.input, 34, self.FOLLOW_34_in_type_specifier376)
+ if self.failed:
+ return
+
+
+ elif alt13 == 2:
+ # C.g:207:4: 'char'
+ self.match(self.input, 35, self.FOLLOW_35_in_type_specifier381)
+ if self.failed:
+ return
+
+
+ elif alt13 == 3:
+ # C.g:208:4: 'short'
+ self.match(self.input, 36, self.FOLLOW_36_in_type_specifier386)
+ if self.failed:
+ return
+
+
+ elif alt13 == 4:
+ # C.g:209:4: 'int'
+ self.match(self.input, 37, self.FOLLOW_37_in_type_specifier391)
+ if self.failed:
+ return
+
+
+ elif alt13 == 5:
+ # C.g:210:4: 'long'
+ self.match(self.input, 38, self.FOLLOW_38_in_type_specifier396)
+ if self.failed:
+ return
+
+
+ elif alt13 == 6:
+ # C.g:211:4: 'float'
+ self.match(self.input, 39, self.FOLLOW_39_in_type_specifier401)
+ if self.failed:
+ return
+
+
+ elif alt13 == 7:
+ # C.g:212:4: 'double'
+ self.match(self.input, 40, self.FOLLOW_40_in_type_specifier406)
+ if self.failed:
+ return
+
+
+ elif alt13 == 8:
+ # C.g:213:4: 'signed'
+ self.match(self.input, 41, self.FOLLOW_41_in_type_specifier411)
+ if self.failed:
+ return
+
+
+ elif alt13 == 9:
+ # C.g:214:4: 'unsigned'
+ self.match(self.input, 42, self.FOLLOW_42_in_type_specifier416)
+ if self.failed:
+ return
+
+
+ elif alt13 == 10:
+ # C.g:215:4: s= struct_or_union_specifier
+ self.following.append(self.FOLLOW_struct_or_union_specifier_in_type_specifier423)
+ s = self.struct_or_union_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if s.stop is not None:
+ self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
+
+
+
+
+ elif alt13 == 11:
+ # C.g:220:4: e= enum_specifier
+ self.following.append(self.FOLLOW_enum_specifier_in_type_specifier433)
+ e = self.enum_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+
+ if e.stop is not None:
+ self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+
+ elif alt13 == 12:
+ # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )=> type_id
+ self.following.append(self.FOLLOW_type_id_in_type_specifier451)
+ self.type_id()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 9, type_specifier_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_specifier
+
+
+ # $ANTLR start type_id
+ # C.g:228:1: type_id : IDENTIFIER ;
+ def type_id(self, ):
+
+ type_id_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10):
+ return
+
+ # C.g:229:5: ( IDENTIFIER )
+ # C.g:229:9: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id467)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 10, type_id_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_id
+
+ class struct_or_union_specifier_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start struct_or_union_specifier
+ # C.g:233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );
+ def struct_or_union_specifier(self, ):
+
+ retval = self.struct_or_union_specifier_return()
+ retval.start = self.input.LT(1)
+ struct_or_union_specifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 11):
+ return retval
+
+ # C.g:235:2: ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER )
+ alt15 = 2
+ LA15_0 = self.input.LA(1)
+
+ if ((45 <= LA15_0 <= 46)) :
+ LA15_1 = self.input.LA(2)
+
+ if (LA15_1 == IDENTIFIER) :
+ LA15_2 = self.input.LA(3)
+
+ if (LA15_2 == 43) :
+ alt15 = 1
+ elif (LA15_2 == EOF or LA15_2 == IDENTIFIER or LA15_2 == 25 or LA15_2 == 27 or (29 <= LA15_2 <= 42) or (45 <= LA15_2 <= 64) or LA15_2 == 66) :
+ alt15 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 2, self.input)
+
+ raise nvae
+
+ elif (LA15_1 == 43) :
+ alt15 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 1, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 0, self.input)
+
+ raise nvae
+
+ if alt15 == 1:
+ # C.g:235:4: struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}'
+ self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier494)
+ self.struct_or_union()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:235:20: ( IDENTIFIER )?
+ alt14 = 2
+ LA14_0 = self.input.LA(1)
+
+ if (LA14_0 == IDENTIFIER) :
+ alt14 = 1
+ if alt14 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier496)
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 43, self.FOLLOW_43_in_struct_or_union_specifier499)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_struct_declaration_list_in_struct_or_union_specifier501)
+ self.struct_declaration_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, 44, self.FOLLOW_44_in_struct_or_union_specifier503)
+ if self.failed:
+ return retval
+
+
+ elif alt15 == 2:
+ # C.g:236:4: struct_or_union IDENTIFIER
+ self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier508)
+ self.struct_or_union()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier510)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 11, struct_or_union_specifier_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end struct_or_union_specifier
+
+
+ # $ANTLR start struct_or_union
+ # C.g:239:1: struct_or_union : ( 'struct' | 'union' );
+ def struct_or_union(self, ):
+
+ struct_or_union_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12):
+ return
+
+ # C.g:240:2: ( 'struct' | 'union' )
+ # C.g:
+ if (45 <= self.input.LA(1) <= 46):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_struct_or_union0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 12, struct_or_union_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_or_union
+
+
+ # $ANTLR start struct_declaration_list
+ # C.g:244:1: struct_declaration_list : ( struct_declaration )+ ;
+ def struct_declaration_list(self, ):
+
+ struct_declaration_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13):
+ return
+
+ # C.g:245:2: ( ( struct_declaration )+ )
+ # C.g:245:4: ( struct_declaration )+
+ # C.g:245:4: ( struct_declaration )+
+ cnt16 = 0
+ while True: #loop16
+ alt16 = 2
+ LA16_0 = self.input.LA(1)
+
+ if (LA16_0 == IDENTIFIER or (34 <= LA16_0 <= 42) or (45 <= LA16_0 <= 46) or (48 <= LA16_0 <= 61)) :
+ alt16 = 1
+
+
+ if alt16 == 1:
+ # C.g:0:0: struct_declaration
+ self.following.append(self.FOLLOW_struct_declaration_in_struct_declaration_list537)
+ self.struct_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt16 >= 1:
+ break #loop16
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(16, self.input)
+ raise eee
+
+ cnt16 += 1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 13, struct_declaration_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declaration_list
+
+
+ # $ANTLR start struct_declaration
+ # C.g:248:1: struct_declaration : specifier_qualifier_list struct_declarator_list ';' ;
+ def struct_declaration(self, ):
+
+ struct_declaration_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14):
+ return
+
+ # C.g:249:2: ( specifier_qualifier_list struct_declarator_list ';' )
+ # C.g:249:4: specifier_qualifier_list struct_declarator_list ';'
+ self.following.append(self.FOLLOW_specifier_qualifier_list_in_struct_declaration549)
+ self.specifier_qualifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration551)
+ self.struct_declarator_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration553)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 14, struct_declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declaration
+
+
+ # $ANTLR start specifier_qualifier_list
+ # C.g:252:1: specifier_qualifier_list : ( type_qualifier | type_specifier )+ ;
+ def specifier_qualifier_list(self, ):
+
+ specifier_qualifier_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15):
+ return
+
+ # C.g:253:2: ( ( type_qualifier | type_specifier )+ )
+ # C.g:253:4: ( type_qualifier | type_specifier )+
+ # C.g:253:4: ( type_qualifier | type_specifier )+
+ cnt17 = 0
+ while True: #loop17
+ alt17 = 3
+ LA17 = self.input.LA(1)
+ if LA17 == 58:
+ LA17_2 = self.input.LA(2)
+
+ if (self.synpred39()) :
+ alt17 = 1
+
+
+ elif LA17 == 59:
+ LA17_3 = self.input.LA(2)
+
+ if (self.synpred39()) :
+ alt17 = 1
+
+
+ elif LA17 == 60:
+ LA17_4 = self.input.LA(2)
+
+ if (self.synpred39()) :
+ alt17 = 1
+
+
+ elif LA17 == IDENTIFIER:
+ LA17 = self.input.LA(2)
+ if LA17 == EOF or LA17 == IDENTIFIER or LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48 or LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 58 or LA17 == 59 or LA17 == 60 or LA17 == 61 or LA17 == 63 or LA17 == 66:
+ alt17 = 2
+ elif LA17 == 62:
+ LA17_94 = self.input.LA(3)
+
+ if (self.synpred40()) :
+ alt17 = 2
+
+
+ elif LA17 == 47:
+ LA17_95 = self.input.LA(3)
+
+ if (self.synpred40()) :
+ alt17 = 2
+
+
+ elif LA17 == 64:
+ LA17_96 = self.input.LA(3)
+
+ if (self.synpred40()) :
+ alt17 = 2
+
+
+
+                    elif (49 <= LA17 <= 57) or LA17 == 61:
+ alt17 = 1
+                    elif (34 <= LA17 <= 42) or (45 <= LA17 <= 46) or LA17 == 48:
+ alt17 = 2
+
+ if alt17 == 1:
+ # C.g:253:6: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_specifier_qualifier_list566)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt17 == 2:
+ # C.g:253:23: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_specifier_qualifier_list570)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt17 >= 1:
+ break #loop17
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(17, self.input)
+ raise eee
+
+ cnt17 += 1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 15, specifier_qualifier_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end specifier_qualifier_list
+
+
+ # $ANTLR start struct_declarator_list
+ # C.g:256:1: struct_declarator_list : struct_declarator ( ',' struct_declarator )* ;
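+    # Editor's note (illustrative): a comma-separated run of declarators,
+    # e.g. "Low, High" in "UINT32 Low, High;"; ',' is token 27.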
+ def struct_declarator_list(self, ):
+
+ struct_declarator_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16):
+ return
+
+ # C.g:257:2: ( struct_declarator ( ',' struct_declarator )* )
+ # C.g:257:4: struct_declarator ( ',' struct_declarator )*
+ self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list584)
+ self.struct_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:257:22: ( ',' struct_declarator )*
+ while True: #loop18
+ alt18 = 2
+ LA18_0 = self.input.LA(1)
+
+ if (LA18_0 == 27) :
+ alt18 = 1
+
+
+ if alt18 == 1:
+ # C.g:257:23: ',' struct_declarator
+ self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list587)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list589)
+ self.struct_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop18
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 16, struct_declarator_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declarator_list
+
+
+ # $ANTLR start struct_declarator
+ # C.g:260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );
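+    # Editor's note (illustrative): covers plain members and bit-fields,
+    # e.g. "Flags : 4" (declarator plus width) or an anonymous padding
+    # field ": 28"; ':' is token 47.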
+ def struct_declarator(self, ):
+
+ struct_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17):
+ return
+
+ # C.g:261:2: ( declarator ( ':' constant_expression )? | ':' constant_expression )
+ alt20 = 2
+ LA20_0 = self.input.LA(1)
+
+ if (LA20_0 == IDENTIFIER or (58 <= LA20_0 <= 60) or LA20_0 == 62 or LA20_0 == 66) :
+ alt20 = 1
+ elif (LA20_0 == 47) :
+ alt20 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input)
+
+ raise nvae
+
+ if alt20 == 1:
+ # C.g:261:4: declarator ( ':' constant_expression )?
+ self.following.append(self.FOLLOW_declarator_in_struct_declarator602)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:261:15: ( ':' constant_expression )?
+ alt19 = 2
+ LA19_0 = self.input.LA(1)
+
+ if (LA19_0 == 47) :
+ alt19 = 1
+ if alt19 == 1:
+ # C.g:261:16: ':' constant_expression
+ self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator605)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_struct_declarator607)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt20 == 2:
+ # C.g:262:4: ':' constant_expression
+ self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator614)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_struct_declarator616)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 17, struct_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end struct_declarator
+
+ class enum_specifier_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start enum_specifier
+ # C.g:265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );
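+    # Editor's note (illustrative): the three alternatives correspond to
+    # "enum { RED, GREEN }", "enum COLOR { RED, GREEN }", and a bare
+    # reference "enum COLOR"; the k=3 lookahead distinguishes the
+    # tag-with-body form from the tag-only form.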
+ def enum_specifier(self, ):
+
+ retval = self.enum_specifier_return()
+ retval.start = self.input.LT(1)
+ enum_specifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 18):
+ return retval
+
+ # C.g:267:2: ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER )
+ alt23 = 3
+ LA23_0 = self.input.LA(1)
+
+ if (LA23_0 == 48) :
+ LA23_1 = self.input.LA(2)
+
+ if (LA23_1 == IDENTIFIER) :
+ LA23_2 = self.input.LA(3)
+
+ if (LA23_2 == 43) :
+ alt23 = 2
+ elif (LA23_2 == EOF or LA23_2 == IDENTIFIER or LA23_2 == 25 or LA23_2 == 27 or (29 <= LA23_2 <= 42) or (45 <= LA23_2 <= 64) or LA23_2 == 66) :
+ alt23 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 2, self.input)
+
+ raise nvae
+
+ elif (LA23_1 == 43) :
+ alt23 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 1, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 0, self.input)
+
+ raise nvae
+
+ if alt23 == 1:
+ # C.g:267:4: 'enum' '{' enumerator_list ( ',' )? '}'
+ self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier634)
+ if self.failed:
+ return retval
+ self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier636)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier638)
+ self.enumerator_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:267:31: ( ',' )?
+ alt21 = 2
+ LA21_0 = self.input.LA(1)
+
+ if (LA21_0 == 27) :
+ alt21 = 1
+ if alt21 == 1:
+ # C.g:0:0: ','
+ self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier640)
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier643)
+ if self.failed:
+ return retval
+
+
+ elif alt23 == 2:
+ # C.g:268:4: 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}'
+ self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier648)
+ if self.failed:
+ return retval
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier650)
+ if self.failed:
+ return retval
+ self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier652)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier654)
+ self.enumerator_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:268:42: ( ',' )?
+ alt22 = 2
+ LA22_0 = self.input.LA(1)
+
+ if (LA22_0 == 27) :
+ alt22 = 1
+ if alt22 == 1:
+ # C.g:0:0: ','
+ self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier656)
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier659)
+ if self.failed:
+ return retval
+
+
+ elif alt23 == 3:
+ # C.g:269:4: 'enum' IDENTIFIER
+ self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier664)
+ if self.failed:
+ return retval
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier666)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 18, enum_specifier_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end enum_specifier
+
+
+ # $ANTLR start enumerator_list
+ # C.g:272:1: enumerator_list : enumerator ( ',' enumerator )* ;
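+    # Editor's note (illustrative): e.g. "RED, GREEN = 2, BLUE"; the LA(2)
+    # check in the loop below keeps a trailing ',' before '}' out of this
+    # rule, leaving it for enum_specifier's optional comma.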
+ def enumerator_list(self, ):
+
+ enumerator_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 19):
+ return
+
+ # C.g:273:2: ( enumerator ( ',' enumerator )* )
+ # C.g:273:4: enumerator ( ',' enumerator )*
+ self.following.append(self.FOLLOW_enumerator_in_enumerator_list677)
+ self.enumerator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:273:15: ( ',' enumerator )*
+ while True: #loop24
+ alt24 = 2
+ LA24_0 = self.input.LA(1)
+
+ if (LA24_0 == 27) :
+ LA24_1 = self.input.LA(2)
+
+ if (LA24_1 == IDENTIFIER) :
+ alt24 = 1
+
+
+
+
+ if alt24 == 1:
+ # C.g:273:16: ',' enumerator
+ self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list680)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_enumerator_in_enumerator_list682)
+ self.enumerator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop24
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 19, enumerator_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end enumerator_list
+
+
+ # $ANTLR start enumerator
+ # C.g:276:1: enumerator : IDENTIFIER ( '=' constant_expression )? ;
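+    # Editor's note (illustrative): e.g. "MAX_DEPTH = 8" or just
+    # "MAX_DEPTH"; '=' is token 28.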
+ def enumerator(self, ):
+
+ enumerator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20):
+ return
+
+ # C.g:277:2: ( IDENTIFIER ( '=' constant_expression )? )
+ # C.g:277:4: IDENTIFIER ( '=' constant_expression )?
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator695)
+ if self.failed:
+ return
+ # C.g:277:15: ( '=' constant_expression )?
+ alt25 = 2
+ LA25_0 = self.input.LA(1)
+
+ if (LA25_0 == 28) :
+ alt25 = 1
+ if alt25 == 1:
+ # C.g:277:16: '=' constant_expression
+ self.match(self.input, 28, self.FOLLOW_28_in_enumerator698)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_enumerator700)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 20, enumerator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end enumerator
+
+
+ # $ANTLR start type_qualifier
+ # C.g:280:1: type_qualifier : ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' );
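+    # Editor's note (illustrative): a single-token rule; all thirteen
+    # qualifier keywords ('const' through 'PACKED') map to the contiguous
+    # token range 49..61, so the body is one range check plus consume().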
+ def type_qualifier(self, ):
+
+ type_qualifier_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21):
+ return
+
+ # C.g:281:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' )
+ # C.g:
+ if (49 <= self.input.LA(1) <= 61):
+                    self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_type_qualifier0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 21, type_qualifier_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_qualifier
+
+ class declarator_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start declarator
+ # C.g:296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );
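+    # Editor's note (illustrative): alternative 1 matches forms such as
+    # "* EFIAPI Entry" or "Buffer[4]" (an optional pointer and optional EFI
+    # calling-convention keywords before a direct_declarator); alternative 2
+    # is a bare pointer.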
+ def declarator(self, ):
+
+ retval = self.declarator_return()
+ retval.start = self.input.LT(1)
+ declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 22):
+ return retval
+
+ # C.g:297:2: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer )
+ alt30 = 2
+ LA30_0 = self.input.LA(1)
+
+ if (LA30_0 == 66) :
+ LA30_1 = self.input.LA(2)
+
+ if (self.synpred66()) :
+ alt30 = 1
+ elif (True) :
+ alt30 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 1, self.input)
+
+ raise nvae
+
+ elif (LA30_0 == IDENTIFIER or (58 <= LA30_0 <= 60) or LA30_0 == 62) :
+ alt30 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 0, self.input)
+
+ raise nvae
+
+ if alt30 == 1:
+ # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
+ # C.g:297:4: ( pointer )?
+ alt26 = 2
+ LA26_0 = self.input.LA(1)
+
+ if (LA26_0 == 66) :
+ alt26 = 1
+ if alt26 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_declarator784)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ # C.g:297:13: ( 'EFIAPI' )?
+ alt27 = 2
+ LA27_0 = self.input.LA(1)
+
+ if (LA27_0 == 58) :
+ alt27 = 1
+ if alt27 == 1:
+ # C.g:297:14: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_declarator788)
+ if self.failed:
+ return retval
+
+
+
+ # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
+ alt28 = 2
+ LA28_0 = self.input.LA(1)
+
+ if (LA28_0 == 59) :
+ alt28 = 1
+ if alt28 == 1:
+ # C.g:297:26: 'EFI_BOOTSERVICE'
+ self.match(self.input, 59, self.FOLLOW_59_in_declarator793)
+ if self.failed:
+ return retval
+
+
+
+ # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
+ alt29 = 2
+ LA29_0 = self.input.LA(1)
+
+ if (LA29_0 == 60) :
+ alt29 = 1
+ if alt29 == 1:
+ # C.g:297:47: 'EFI_RUNTIMESERVICE'
+ self.match(self.input, 60, self.FOLLOW_60_in_declarator798)
+ if self.failed:
+ return retval
+
+
+
+ self.following.append(self.FOLLOW_direct_declarator_in_declarator802)
+ self.direct_declarator()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt30 == 2:
+ # C.g:299:4: pointer
+ self.following.append(self.FOLLOW_pointer_in_declarator808)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 22, declarator_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end declarator
+
+
+ # $ANTLR start direct_declarator
+ # C.g:302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );
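+    # Editor's note (illustrative): alternative 1 is a name plus optional
+    # suffixes, e.g. "Buffer[16]"; alternative 2 is a parenthesized
+    # declarator such as "(EFIAPI *Notify)(VOID)", which must be followed
+    # by at least one declarator_suffix.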
+ def direct_declarator(self, ):
+
+ direct_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23):
+ return
+
+ # C.g:303:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ )
+ alt34 = 2
+ LA34_0 = self.input.LA(1)
+
+ if (LA34_0 == IDENTIFIER) :
+ alt34 = 1
+ elif (LA34_0 == 62) :
+ alt34 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );", 34, 0, self.input)
+
+ raise nvae
+
+ if alt34 == 1:
+ # C.g:303:4: IDENTIFIER ( declarator_suffix )*
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator819)
+ if self.failed:
+ return
+ # C.g:303:15: ( declarator_suffix )*
+ while True: #loop31
+ alt31 = 2
+ LA31_0 = self.input.LA(1)
+
+ if (LA31_0 == 62) :
+ LA31 = self.input.LA(2)
+ if LA31 == 63:
+ LA31_30 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 58:
+ LA31_31 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 66:
+ LA31_32 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 59:
+ LA31_33 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 60:
+ LA31_34 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == IDENTIFIER:
+ LA31_35 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+                            elif (29 <= LA31 <= 33):
+ LA31_37 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 34:
+ LA31_38 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 35:
+ LA31_39 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 36:
+ LA31_40 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 37:
+ LA31_41 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 38:
+ LA31_42 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 39:
+ LA31_43 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 40:
+ LA31_44 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 41:
+ LA31_45 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 42:
+ LA31_46 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 45 or LA31 == 46:
+ LA31_47 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 48:
+ LA31_48 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+                            elif (49 <= LA31 <= 57) or LA31 == 61:
+ LA31_49 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+
+ elif (LA31_0 == 64) :
+ LA31 = self.input.LA(2)
+ if LA31 == 65:
+ LA31_51 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 62:
+ LA31_52 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == IDENTIFIER:
+ LA31_53 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == HEX_LITERAL:
+ LA31_54 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == OCTAL_LITERAL:
+ LA31_55 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == DECIMAL_LITERAL:
+ LA31_56 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == CHARACTER_LITERAL:
+ LA31_57 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == STRING_LITERAL:
+ LA31_58 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == FLOATING_POINT_LITERAL:
+ LA31_59 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 72:
+ LA31_60 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 73:
+ LA31_61 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+                            elif LA31 == 66 or (68 <= LA31 <= 69) or (77 <= LA31 <= 79):
+ LA31_62 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+ elif LA31 == 74:
+ LA31_63 = self.input.LA(3)
+
+ if (self.synpred67()) :
+ alt31 = 1
+
+
+
+
+
+ if alt31 == 1:
+ # C.g:0:0: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator821)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop31
+
+
+
+
+ elif alt34 == 2:
+ # C.g:304:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+
+ self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator827)
+ if self.failed:
+ return
+ # C.g:304:8: ( 'EFIAPI' )?
+ alt32 = 2
+ LA32_0 = self.input.LA(1)
+
+ if (LA32_0 == 58) :
+ LA32_1 = self.input.LA(2)
+
+ if (self.synpred69()) :
+ alt32 = 1
+ if alt32 == 1:
+ # C.g:304:9: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator830)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_declarator_in_direct_declarator834)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_direct_declarator836)
+ if self.failed:
+ return
+ # C.g:304:35: ( declarator_suffix )+
+ cnt33 = 0
+ while True: #loop33
+ alt33 = 2
+ LA33_0 = self.input.LA(1)
+
+ if (LA33_0 == 62) :
+ LA33 = self.input.LA(2)
+ if LA33 == 63:
+ LA33_30 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 58:
+ LA33_31 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 66:
+ LA33_32 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 59:
+ LA33_33 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 60:
+ LA33_34 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == IDENTIFIER:
+ LA33_35 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+                                elif (29 <= LA33 <= 33):
+ LA33_37 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 34:
+ LA33_38 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 35:
+ LA33_39 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 36:
+ LA33_40 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 37:
+ LA33_41 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 38:
+ LA33_42 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 39:
+ LA33_43 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 40:
+ LA33_44 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 41:
+ LA33_45 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 42:
+ LA33_46 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 45 or LA33 == 46:
+ LA33_47 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 48:
+ LA33_48 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+                                elif (49 <= LA33 <= 57) or LA33 == 61:
+ LA33_49 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+
+ elif (LA33_0 == 64) :
+ LA33 = self.input.LA(2)
+ if LA33 == 65:
+ LA33_51 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 62:
+ LA33_52 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == IDENTIFIER:
+ LA33_53 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == HEX_LITERAL:
+ LA33_54 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == OCTAL_LITERAL:
+ LA33_55 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == DECIMAL_LITERAL:
+ LA33_56 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == CHARACTER_LITERAL:
+ LA33_57 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == STRING_LITERAL:
+ LA33_58 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == FLOATING_POINT_LITERAL:
+ LA33_59 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 72:
+ LA33_60 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 73:
+ LA33_61 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+                                elif LA33 == 66 or (68 <= LA33 <= 69) or (77 <= LA33 <= 79):
+ LA33_62 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+ elif LA33 == 74:
+ LA33_63 = self.input.LA(3)
+
+ if (self.synpred70()) :
+ alt33 = 1
+
+
+
+
+
+ if alt33 == 1:
+ # C.g:0:0: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator838)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt33 >= 1:
+ break #loop33
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(33, self.input)
+ raise eee
+
+ cnt33 += 1
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 23, direct_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end direct_declarator
+
+
+ # $ANTLR start declarator_suffix
+ # C.g:307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );
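+    # Editor's note (illustrative): the five alternatives are, in order,
+    # "[8]", "[]", "(UINTN Count)", "(a, b)" (a bare name list), and "()";
+    # '[' is token 64, ']' 65, '(' 62, ')' 63.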
+ def declarator_suffix(self, ):
+
+ declarator_suffix_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24):
+ return
+
+ # C.g:308:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' )
+ alt35 = 5
+ LA35_0 = self.input.LA(1)
+
+ if (LA35_0 == 64) :
+ LA35_1 = self.input.LA(2)
+
+ if (LA35_1 == 65) :
+ alt35 = 2
+ elif ((IDENTIFIER <= LA35_1 <= FLOATING_POINT_LITERAL) or LA35_1 == 62 or LA35_1 == 66 or (68 <= LA35_1 <= 69) or (72 <= LA35_1 <= 74) or (77 <= LA35_1 <= 79)) :
+ alt35 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input)
+
+ raise nvae
+
+ elif (LA35_0 == 62) :
+ LA35 = self.input.LA(2)
+ if LA35 == 63:
+ alt35 = 5
+                elif (29 <= LA35 <= 42) or (45 <= LA35 <= 46) or (48 <= LA35 <= 61) or LA35 == 66:
+ alt35 = 3
+ elif LA35 == IDENTIFIER:
+ LA35_29 = self.input.LA(3)
+
+ if (self.synpred73()) :
+ alt35 = 3
+ elif (self.synpred74()) :
+ alt35 = 4
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 29, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input)
+
+ raise nvae
+
+ if alt35 == 1:
+ # C.g:308:6: '[' constant_expression ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix852)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix854)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix856)
+ if self.failed:
+ return
+
+
+ elif alt35 == 2:
+ # C.g:309:9: '[' ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix866)
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix868)
+ if self.failed:
+ return
+
+
+ elif alt35 == 3:
+ # C.g:310:9: '(' parameter_type_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix878)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix880)
+ self.parameter_type_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix882)
+ if self.failed:
+ return
+
+
+ elif alt35 == 4:
+ # C.g:311:9: '(' identifier_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix892)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix894)
+ self.identifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix896)
+ if self.failed:
+ return
+
+
+ elif alt35 == 5:
+ # C.g:312:9: '(' ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix906)
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix908)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 24, declarator_suffix_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end declarator_suffix
+
+
+ # $ANTLR start pointer
+ # C.g:315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );
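+    # Editor's note (illustrative): '*' (token 66) optionally followed by
+    # qualifiers and/or further pointers, e.g. "* CONST" or "**"; syntactic
+    # predicates choose among the three alternatives.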
+ def pointer(self, ):
+
+ pointer_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25):
+ return
+
+ # C.g:316:2: ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
+ alt38 = 3
+ LA38_0 = self.input.LA(1)
+
+ if (LA38_0 == 66) :
+ LA38 = self.input.LA(2)
+ if LA38 == 66:
+ LA38_2 = self.input.LA(3)
+
+ if (self.synpred78()) :
+ alt38 = 2
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input)
+
+ raise nvae
+
+ elif LA38 == 58:
+ LA38_3 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input)
+
+ raise nvae
+
+ elif LA38 == 59:
+ LA38_4 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input)
+
+ raise nvae
+
+ elif LA38 == 60:
+ LA38_5 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 5, self.input)
+
+ raise nvae
+
+                elif LA38 == EOF or LA38 == IDENTIFIER or (25 <= LA38 <= 43) or (45 <= LA38 <= 48) or (62 <= LA38 <= 64):
+ alt38 = 3
+ elif LA38 == 53:
+ LA38_21 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 21, self.input)
+
+ raise nvae
+
+                elif (49 <= LA38 <= 52) or (54 <= LA38 <= 57) or LA38 == 61:
+ LA38_29 = self.input.LA(3)
+
+ if (self.synpred77()) :
+ alt38 = 1
+ elif (True) :
+ alt38 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input)
+
+ raise nvae
+
+ if alt38 == 1:
+ # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
+ self.match(self.input, 66, self.FOLLOW_66_in_pointer919)
+ if self.failed:
+ return
+ # C.g:316:8: ( type_qualifier )+
+ cnt36 = 0
+ while True: #loop36
+ alt36 = 2
+ LA36 = self.input.LA(1)
+ if LA36 == 58:
+ LA36_2 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 59:
+ LA36_3 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 60:
+ LA36_4 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+ elif LA36 == 53:
+ LA36_20 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+                        elif (49 <= LA36 <= 52) or (54 <= LA36 <= 57) or LA36 == 61:
+ LA36_28 = self.input.LA(2)
+
+ if (self.synpred75()) :
+ alt36 = 1
+
+
+
+ if alt36 == 1:
+ # C.g:0:0: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_pointer921)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt36 >= 1:
+ break #loop36
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(36, self.input)
+ raise eee
+
+ cnt36 += 1
+
+
+ # C.g:316:24: ( pointer )?
+ alt37 = 2
+ LA37_0 = self.input.LA(1)
+
+ if (LA37_0 == 66) :
+ LA37_1 = self.input.LA(2)
+
+ if (self.synpred76()) :
+ alt37 = 1
+ if alt37 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_pointer924)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt38 == 2:
+ # C.g:317:4: '*' pointer
+ self.match(self.input, 66, self.FOLLOW_66_in_pointer930)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_pointer_in_pointer932)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt38 == 3:
+ # C.g:318:4: '*'
+ self.match(self.input, 66, self.FOLLOW_66_in_pointer937)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 25, pointer_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end pointer
+
+
+ # $ANTLR start parameter_type_list
+ # C.g:321:1: parameter_type_list : parameter_list ( ',' ( 'OPTIONAL' )? '...' )? ;
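+    # Editor's note (illustrative): a parameter list with an optional
+    # variadic tail, e.g. "UINTN Count, ..."; '...' is token 67 and may be
+    # preceded by 'OPTIONAL' (token 53) in EFI-style prototypes.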
+ def parameter_type_list(self, ):
+
+ parameter_type_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26):
+ return
+
+ # C.g:322:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? )
+ # C.g:322:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )?
+ self.following.append(self.FOLLOW_parameter_list_in_parameter_type_list948)
+ self.parameter_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:322:19: ( ',' ( 'OPTIONAL' )? '...' )?
+ alt40 = 2
+ LA40_0 = self.input.LA(1)
+
+ if (LA40_0 == 27) :
+ alt40 = 1
+ if alt40 == 1:
+ # C.g:322:20: ',' ( 'OPTIONAL' )? '...'
+ self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list951)
+ if self.failed:
+ return
+ # C.g:322:24: ( 'OPTIONAL' )?
+ alt39 = 2
+ LA39_0 = self.input.LA(1)
+
+ if (LA39_0 == 53) :
+ alt39 = 1
+ if alt39 == 1:
+ # C.g:322:25: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list954)
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 67, self.FOLLOW_67_in_parameter_type_list958)
+ if self.failed:
+ return
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 26, parameter_type_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end parameter_type_list
+
+
+ # $ANTLR start parameter_list
+ # C.g:325:1: parameter_list : parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* ;
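+    # Editor's note (illustrative): e.g. "IN UINT32 A, OPTIONAL IN UINT32 B";
+    # each ',' may be followed by an 'OPTIONAL' keyword before the next
+    # parameter_declaration.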
+ def parameter_list(self, ):
+
+ parameter_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27):
+ return
+
+ # C.g:326:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* )
+ # C.g:326:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )*
+ self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list971)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:326:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )*
+ while True: #loop42
+ alt42 = 2
+ LA42_0 = self.input.LA(1)
+
+ if (LA42_0 == 27) :
+ LA42_1 = self.input.LA(2)
+
+ if (LA42_1 == 53) :
+ LA42_3 = self.input.LA(3)
+
+ if (self.synpred82()) :
+ alt42 = 1
+
+
+ elif (LA42_1 == IDENTIFIER or (29 <= LA42_1 <= 42) or (45 <= LA42_1 <= 46) or (48 <= LA42_1 <= 52) or (54 <= LA42_1 <= 61) or LA42_1 == 66) :
+ alt42 = 1
+
+
+
+
+ if alt42 == 1:
+ # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
+ self.match(self.input, 27, self.FOLLOW_27_in_parameter_list974)
+ if self.failed:
+ return
+ # C.g:326:31: ( 'OPTIONAL' )?
+ alt41 = 2
+ LA41_0 = self.input.LA(1)
+
+ if (LA41_0 == 53) :
+ LA41_1 = self.input.LA(2)
+
+ if (self.synpred81()) :
+ alt41 = 1
+ if alt41 == 1:
+ # C.g:326:32: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_parameter_list977)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list981)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop42
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 27, parameter_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end parameter_list
+
+
+ # $ANTLR start parameter_declaration
+ # C.g:329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );
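+    # Editor's note (illustrative): alternative 1 matches e.g.
+    # "IN CONST UINT8 *Buffer OPTIONAL"; alternative 2 matches a bare,
+    # possibly pointered name such as "*Name".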
+ def parameter_declaration(self, ):
+
+ parameter_declaration_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28):
+ return
+
+ # C.g:330:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER )
+ alt46 = 2
+ LA46 = self.input.LA(1)
+            if (29 <= LA46 <= 42) or (45 <= LA46 <= 46) or (48 <= LA46 <= 61):
+ alt46 = 1
+ elif LA46 == IDENTIFIER:
+ LA46_13 = self.input.LA(2)
+
+ if (self.synpred86()) :
+ alt46 = 1
+ elif (True) :
+ alt46 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input)
+
+ raise nvae
+
+ elif LA46 == 66:
+ alt46 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input)
+
+ raise nvae
+
+ if alt46 == 1:
+ # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
+ self.following.append(self.FOLLOW_declaration_specifiers_in_parameter_declaration994)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:330:27: ( declarator | abstract_declarator )*
+ while True: #loop43
+ alt43 = 3
+ LA43 = self.input.LA(1)
+ if LA43 == 66:
+ LA43_5 = self.input.LA(2)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+                    elif LA43 == IDENTIFIER or (58 <= LA43 <= 60):
+ alt43 = 1
+ elif LA43 == 62:
+ LA43 = self.input.LA(2)
+                        if (29 <= LA43 <= 42) or (45 <= LA43 <= 46) or (48 <= LA43 <= 57) or LA43 == 61 or (63 <= LA43 <= 64):
+ alt43 = 2
+ elif LA43 == IDENTIFIER:
+ LA43_37 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 58:
+ LA43_38 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 66:
+ LA43_39 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 59:
+ LA43_40 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 60:
+ LA43_41 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+ elif LA43 == 62:
+ LA43_43 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt43 = 1
+ elif (self.synpred84()) :
+ alt43 = 2
+
+
+
+ elif LA43 == 64:
+ alt43 = 2
+
+ if alt43 == 1:
+ # C.g:330:28: declarator
+ self.following.append(self.FOLLOW_declarator_in_parameter_declaration997)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt43 == 2:
+ # C.g:330:39: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_parameter_declaration999)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop43
+
+
+ # C.g:330:61: ( 'OPTIONAL' )?
+ alt44 = 2
+ LA44_0 = self.input.LA(1)
+
+ if (LA44_0 == 53) :
+ alt44 = 1
+ if alt44 == 1:
+ # C.g:330:62: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration1004)
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt46 == 2:
+ # C.g:332:4: ( pointer )* IDENTIFIER
+ # C.g:332:4: ( pointer )*
+ while True: #loop45
+ alt45 = 2
+ LA45_0 = self.input.LA(1)
+
+ if (LA45_0 == 66) :
+ alt45 = 1
+
+
+ if alt45 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_parameter_declaration1013)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop45
+
+
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1016)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 28, parameter_declaration_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end parameter_declaration
+
+
+ # $ANTLR start identifier_list
+ # C.g:335:1: identifier_list : IDENTIFIER ( ',' IDENTIFIER )* ;
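+    # Editor's note (illustrative): a plain name list such as "a, b, c", as
+    # seen in old-style (K&R) function declarators.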
+ def identifier_list(self, ):
+
+ identifier_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29):
+ return
+
+ # C.g:336:2: ( IDENTIFIER ( ',' IDENTIFIER )* )
+ # C.g:336:4: IDENTIFIER ( ',' IDENTIFIER )*
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1027)
+ if self.failed:
+ return
+ # C.g:337:2: ( ',' IDENTIFIER )*
+ while True: #loop47
+ alt47 = 2
+ LA47_0 = self.input.LA(1)
+
+ if (LA47_0 == 27) :
+ alt47 = 1
+
+
+ if alt47 == 1:
+ # C.g:337:3: ',' IDENTIFIER
+ self.match(self.input, 27, self.FOLLOW_27_in_identifier_list1031)
+ if self.failed:
+ return
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1033)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop47
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 29, identifier_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end identifier_list
+
+
+ # $ANTLR start type_name
+ # C.g:340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );
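+    # Editor's note (illustrative): the operand of a cast or sizeof, e.g.
+    # "CONST UINT8 *" (specifiers plus an optional abstract declarator) or
+    # a bare typedef name via type_id.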
+ def type_name(self, ):
+
+ type_name_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30):
+ return
+
+ # C.g:341:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id )
+ alt49 = 2
+ LA49_0 = self.input.LA(1)
+
+ if ((34 <= LA49_0 <= 42) or (45 <= LA49_0 <= 46) or (48 <= LA49_0 <= 61)) :
+ alt49 = 1
+ elif (LA49_0 == IDENTIFIER) :
+ LA49_13 = self.input.LA(2)
+
+ if (self.synpred90()) :
+ alt49 = 1
+ elif (True) :
+ alt49 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input)
+
+ raise nvae
+
+ if alt49 == 1:
+ # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
+ self.following.append(self.FOLLOW_specifier_qualifier_list_in_type_name1046)
+ self.specifier_qualifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:341:29: ( abstract_declarator )?
+ alt48 = 2
+ LA48_0 = self.input.LA(1)
+
+ if (LA48_0 == 62 or LA48_0 == 64 or LA48_0 == 66) :
+ alt48 = 1
+ if alt48 == 1:
+ # C.g:0:0: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_type_name1048)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt49 == 2:
+ # C.g:342:4: type_id
+ self.following.append(self.FOLLOW_type_id_in_type_name1054)
+ self.type_id()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 30, type_name_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end type_name
+
+
+ # $ANTLR start abstract_declarator
+ # C.g:345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );
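+    # Editor's note (illustrative): a declarator with no name, e.g. the "*"
+    # in "(UINT8 *)" or the "(*)(UINTN)" in a function-pointer cast.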
+ def abstract_declarator(self, ):
+
+ abstract_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31):
+ return
+
+ # C.g:346:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator )
+ alt51 = 2
+ LA51_0 = self.input.LA(1)
+
+ if (LA51_0 == 66) :
+ alt51 = 1
+ elif (LA51_0 == 62 or LA51_0 == 64) :
+ alt51 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input)
+
+ raise nvae
+
+ if alt51 == 1:
+ # C.g:346:4: pointer ( direct_abstract_declarator )?
+ self.following.append(self.FOLLOW_pointer_in_abstract_declarator1065)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:346:12: ( direct_abstract_declarator )?
+ alt50 = 2
+ LA50_0 = self.input.LA(1)
+
+ if (LA50_0 == 62) :
+ LA50 = self.input.LA(2)
+ if LA50 == 63:
+ LA50_12 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 58:
+ LA50_13 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 66:
+ LA50_14 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 59:
+ LA50_15 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 60:
+ LA50_16 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == IDENTIFIER:
+ LA50_17 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 62:
+ LA50_18 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 64:
+ LA50_19 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+                        elif (29 <= LA50 <= 33):
+ LA50_20 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 34:
+ LA50_21 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 35:
+ LA50_22 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 36:
+ LA50_23 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 37:
+ LA50_24 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 38:
+ LA50_25 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 39:
+ LA50_26 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 40:
+ LA50_27 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 41:
+ LA50_28 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 42:
+ LA50_29 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 45 or LA50 == 46:
+ LA50_30 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 48:
+ LA50_31 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+                        elif (49 <= LA50 <= 57) or LA50 == 61:
+ LA50_32 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif (LA50_0 == 64) :
+ LA50 = self.input.LA(2)
+ if LA50 == 65:
+ LA50_33 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 62:
+ LA50_34 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == IDENTIFIER:
+ LA50_35 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == HEX_LITERAL:
+ LA50_36 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == OCTAL_LITERAL:
+ LA50_37 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == DECIMAL_LITERAL:
+ LA50_38 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == CHARACTER_LITERAL:
+ LA50_39 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == STRING_LITERAL:
+ LA50_40 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == FLOATING_POINT_LITERAL:
+ LA50_41 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 72:
+ LA50_42 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 73:
+ LA50_43 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+                        elif LA50 == 66 or (68 <= LA50 <= 69) or (77 <= LA50 <= 79):
+ LA50_44 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ elif LA50 == 74:
+ LA50_45 = self.input.LA(3)
+
+ if (self.synpred91()) :
+ alt50 = 1
+ if alt50 == 1:
+ # C.g:0:0: direct_abstract_declarator
+ self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1067)
+ self.direct_abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt51 == 2:
+ # C.g:347:4: direct_abstract_declarator
+ self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1073)
+ self.direct_abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 31, abstract_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end abstract_declarator
+
+
+ # $ANTLR start direct_abstract_declarator
+ # C.g:350:1: direct_abstract_declarator : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* ;
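+    # Editor's note (illustrative): either a parenthesized abstract
+    # declarator such as "(*)" or a first suffix such as "[4]", followed by
+    # any number of further suffixes, e.g. "(*)(VOID)" or "[4][4]".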
+ def direct_abstract_declarator(self, ):
+
+ direct_abstract_declarator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32):
+ return
+
+ # C.g:351:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* )
+ # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )*
+ # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )
+ alt52 = 2
+ LA52_0 = self.input.LA(1)
+
+ if (LA52_0 == 62) :
+ LA52 = self.input.LA(2)
+                    if LA52 == IDENTIFIER or (29 <= LA52 <= 42) or (45 <= LA52 <= 46) or (48 <= LA52 <= 61) or LA52 == 63:
+ alt52 = 2
+ elif LA52 == 66:
+ LA52_18 = self.input.LA(3)
+
+ if (self.synpred93()) :
+ alt52 = 1
+ elif (True) :
+ alt52 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input)
+
+ raise nvae
+
+ elif LA52 == 62 or LA52 == 64:
+ alt52 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input)
+
+ raise nvae
+
+ elif (LA52_0 == 64) :
+ alt52 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input)
+
+ raise nvae
+
+ if alt52 == 1:
+ # C.g:351:6: '(' abstract_declarator ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1086)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1088)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_direct_abstract_declarator1090)
+ if self.failed:
+ return
+
+
+ elif alt52 == 2:
+ # C.g:351:36: abstract_declarator_suffix
+ self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094)
+ self.abstract_declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:351:65: ( abstract_declarator_suffix )*
+ while True: #loop53
+ alt53 = 2
+ LA53_0 = self.input.LA(1)
+
+ if (LA53_0 == 62) :
+ LA53 = self.input.LA(2)
+ if LA53 == 63:
+ LA53_12 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 58:
+ LA53_13 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 66:
+ LA53_14 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 59:
+ LA53_15 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 60:
+ LA53_16 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == IDENTIFIER:
+ LA53_17 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+                        elif (29 <= LA53 <= 33):
+ LA53_19 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 34:
+ LA53_20 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 35:
+ LA53_21 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 36:
+ LA53_22 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 37:
+ LA53_23 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 38:
+ LA53_24 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 39:
+ LA53_25 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 40:
+ LA53_26 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 41:
+ LA53_27 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 42:
+ LA53_28 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 45 or LA53 == 46:
+ LA53_29 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 48:
+ LA53_30 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+                elif (49 <= LA53 <= 57) or LA53 == 61:
+ LA53_31 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+
+ elif (LA53_0 == 64) :
+ LA53 = self.input.LA(2)
+ if LA53 == 65:
+ LA53_33 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 62:
+ LA53_34 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == IDENTIFIER:
+ LA53_35 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == HEX_LITERAL:
+ LA53_36 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == OCTAL_LITERAL:
+ LA53_37 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == DECIMAL_LITERAL:
+ LA53_38 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == CHARACTER_LITERAL:
+ LA53_39 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == STRING_LITERAL:
+ LA53_40 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == FLOATING_POINT_LITERAL:
+ LA53_41 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 72:
+ LA53_42 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 73:
+ LA53_43 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+                elif LA53 == 66 or (68 <= LA53 <= 69) or (77 <= LA53 <= 79):
+ LA53_44 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+ elif LA53 == 74:
+ LA53_45 = self.input.LA(3)
+
+ if (self.synpred94()) :
+ alt53 = 1
+
+
+
+
+
+ if alt53 == 1:
+ # C.g:0:0: abstract_declarator_suffix
+ self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098)
+ self.abstract_declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop53
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 32, direct_abstract_declarator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end direct_abstract_declarator
+
+
+ # $ANTLR start abstract_declarator_suffix
+ # C.g:354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );
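+    # Note: the four alternatives are the array and function suffixes of an
+    # abstract declarator; tokens 64/65 are '['/']' and 62/63 are '('/')',
+    # selected below by one or two tokens of lookahead (alt54).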
+ def abstract_declarator_suffix(self, ):
+
+ abstract_declarator_suffix_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33):
+ return
+
+ # C.g:355:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' )
+ alt54 = 4
+ LA54_0 = self.input.LA(1)
+
+ if (LA54_0 == 64) :
+ LA54_1 = self.input.LA(2)
+
+ if (LA54_1 == 65) :
+ alt54 = 1
+ elif ((IDENTIFIER <= LA54_1 <= FLOATING_POINT_LITERAL) or LA54_1 == 62 or LA54_1 == 66 or (68 <= LA54_1 <= 69) or (72 <= LA54_1 <= 74) or (77 <= LA54_1 <= 79)) :
+ alt54 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 1, self.input)
+
+ raise nvae
+
+ elif (LA54_0 == 62) :
+ LA54_2 = self.input.LA(2)
+
+ if (LA54_2 == 63) :
+ alt54 = 3
+ elif (LA54_2 == IDENTIFIER or (29 <= LA54_2 <= 42) or (45 <= LA54_2 <= 46) or (48 <= LA54_2 <= 61) or LA54_2 == 66) :
+ alt54 = 4
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input)
+
+ raise nvae
+
+ if alt54 == 1:
+ # C.g:355:4: '[' ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1110)
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1112)
+ if self.failed:
+ return
+
+
+ elif alt54 == 2:
+ # C.g:356:4: '[' constant_expression ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1117)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1119)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1121)
+ if self.failed:
+ return
+
+
+ elif alt54 == 3:
+ # C.g:357:4: '(' ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1126)
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1128)
+ if self.failed:
+ return
+
+
+ elif alt54 == 4:
+ # C.g:358:4: '(' parameter_type_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1133)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135)
+ self.parameter_type_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1137)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 33, abstract_declarator_suffix_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end abstract_declarator_suffix
+
+
+ # $ANTLR start initializer
+ # C.g:361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );
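+    # Note: an initializer is either a plain assignment_expression or a braced
+    # initializer_list with an optional trailing comma (43 = '{', 27 = ',', 44 = '}').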
+ def initializer(self, ):
+
+ initializer_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34):
+ return
+
+ # C.g:363:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' )
+ alt56 = 2
+ LA56_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA56_0 <= FLOATING_POINT_LITERAL) or LA56_0 == 62 or LA56_0 == 66 or (68 <= LA56_0 <= 69) or (72 <= LA56_0 <= 74) or (77 <= LA56_0 <= 79)) :
+ alt56 = 1
+ elif (LA56_0 == 43) :
+ alt56 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input)
+
+ raise nvae
+
+ if alt56 == 1:
+ # C.g:363:4: assignment_expression
+ self.following.append(self.FOLLOW_assignment_expression_in_initializer1150)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt56 == 2:
+ # C.g:364:4: '{' initializer_list ( ',' )? '}'
+ self.match(self.input, 43, self.FOLLOW_43_in_initializer1155)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_initializer_list_in_initializer1157)
+ self.initializer_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:364:25: ( ',' )?
+ alt55 = 2
+ LA55_0 = self.input.LA(1)
+
+ if (LA55_0 == 27) :
+ alt55 = 1
+ if alt55 == 1:
+ # C.g:0:0: ','
+ self.match(self.input, 27, self.FOLLOW_27_in_initializer1159)
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_initializer1162)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 34, initializer_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end initializer
+
+
+ # $ANTLR start initializer_list
+ # C.g:367:1: initializer_list : initializer ( ',' initializer )* ;
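+    # Note: loop57 keeps consuming ',' initializer pairs for as long as the
+    # two-token lookahead confirms that another initializer follows the comma.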
+ def initializer_list(self, ):
+
+ initializer_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35):
+ return
+
+ # C.g:368:2: ( initializer ( ',' initializer )* )
+ # C.g:368:4: initializer ( ',' initializer )*
+ self.following.append(self.FOLLOW_initializer_in_initializer_list1173)
+ self.initializer()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:368:16: ( ',' initializer )*
+ while True: #loop57
+ alt57 = 2
+ LA57_0 = self.input.LA(1)
+
+ if (LA57_0 == 27) :
+ LA57_1 = self.input.LA(2)
+
+ if ((IDENTIFIER <= LA57_1 <= FLOATING_POINT_LITERAL) or LA57_1 == 43 or LA57_1 == 62 or LA57_1 == 66 or (68 <= LA57_1 <= 69) or (72 <= LA57_1 <= 74) or (77 <= LA57_1 <= 79)) :
+ alt57 = 1
+
+
+
+
+ if alt57 == 1:
+ # C.g:368:17: ',' initializer
+ self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1176)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_initializer_in_initializer_list1178)
+ self.initializer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop57
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 35, initializer_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end initializer_list
+
+ class argument_expression_list_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start argument_expression_list
+ # C.g:373:1: argument_expression_list : assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* ;
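+    # Note: each argument may carry a trailing 'OPTIONAL' keyword (token 53),
+    # presumably to tolerate EDK2-style parameter annotations that standard C
+    # argument lists do not have.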
+ def argument_expression_list(self, ):
+
+ retval = self.argument_expression_list_return()
+ retval.start = self.input.LT(1)
+ argument_expression_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 36):
+ return retval
+
+ # C.g:374:2: ( assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* )
+ # C.g:374:6: assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )*
+ self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1196)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:374:28: ( 'OPTIONAL' )?
+ alt58 = 2
+ LA58_0 = self.input.LA(1)
+
+ if (LA58_0 == 53) :
+ alt58 = 1
+ if alt58 == 1:
+ # C.g:374:29: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1199)
+ if self.failed:
+ return retval
+
+
+
+ # C.g:374:42: ( ',' assignment_expression ( 'OPTIONAL' )? )*
+ while True: #loop60
+ alt60 = 2
+ LA60_0 = self.input.LA(1)
+
+ if (LA60_0 == 27) :
+ alt60 = 1
+
+
+ if alt60 == 1:
+ # C.g:374:43: ',' assignment_expression ( 'OPTIONAL' )?
+ self.match(self.input, 27, self.FOLLOW_27_in_argument_expression_list1204)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1206)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:374:69: ( 'OPTIONAL' )?
+ alt59 = 2
+ LA59_0 = self.input.LA(1)
+
+ if (LA59_0 == 53) :
+ alt59 = 1
+ if alt59 == 1:
+ # C.g:374:70: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1209)
+ if self.failed:
+ return retval
+
+
+
+
+
+ else:
+ break #loop60
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 36, argument_expression_list_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end argument_expression_list
+
+
+ # $ANTLR start additive_expression
+ # C.g:377:1: additive_expression : ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* ;
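+    # Note: a left-to-right chain of multiplicative_expressions joined by
+    # '+' (token 68) or '-' (token 69); loop61 exits once neither operator follows.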
+ def additive_expression(self, ):
+
+ additive_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37):
+ return
+
+ # C.g:378:2: ( ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* )
+ # C.g:378:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*
+ # C.g:378:4: ( multiplicative_expression )
+ # C.g:378:5: multiplicative_expression
+ self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1225)
+ self.multiplicative_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:378:32: ( '+' multiplicative_expression | '-' multiplicative_expression )*
+ while True: #loop61
+ alt61 = 3
+ LA61_0 = self.input.LA(1)
+
+ if (LA61_0 == 68) :
+ alt61 = 1
+ elif (LA61_0 == 69) :
+ alt61 = 2
+
+
+ if alt61 == 1:
+ # C.g:378:33: '+' multiplicative_expression
+ self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1229)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1231)
+ self.multiplicative_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt61 == 2:
+ # C.g:378:65: '-' multiplicative_expression
+ self.match(self.input, 69, self.FOLLOW_69_in_additive_expression1235)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1237)
+ self.multiplicative_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop61
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 37, additive_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end additive_expression
+
+
+ # $ANTLR start multiplicative_expression
+ # C.g:381:1: multiplicative_expression : ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* ;
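+    # Note: a chain of cast_expressions joined by '*' (66), '/' (70) or '%' (71),
+    # consumed by loop62 until no multiplicative operator follows.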
+ def multiplicative_expression(self, ):
+
+ multiplicative_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
+ return
+
+ # C.g:382:2: ( ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* )
+ # C.g:382:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
+ # C.g:382:4: ( cast_expression )
+ # C.g:382:5: cast_expression
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1251)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:382:22: ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
+ while True: #loop62
+ alt62 = 4
+ LA62 = self.input.LA(1)
+ if LA62 == 66:
+ alt62 = 1
+ elif LA62 == 70:
+ alt62 = 2
+ elif LA62 == 71:
+ alt62 = 3
+
+ if alt62 == 1:
+ # C.g:382:23: '*' cast_expression
+ self.match(self.input, 66, self.FOLLOW_66_in_multiplicative_expression1255)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1257)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt62 == 2:
+ # C.g:382:45: '/' cast_expression
+ self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1261)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1263)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt62 == 3:
+ # C.g:382:67: '%' cast_expression
+ self.match(self.input, 71, self.FOLLOW_71_in_multiplicative_expression1267)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1269)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop62
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 38, multiplicative_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end multiplicative_expression
+
+
+ # $ANTLR start cast_expression
+ # C.g:385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );
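+    # Note: '(' followed by an IDENTIFIER is ambiguous between a cast and a
+    # parenthesized expression; the syntactic predicate synpred109 resolves it
+    # by backtracking, while type-keyword tokens commit to the cast directly.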
+ def cast_expression(self, ):
+
+ cast_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
+ return
+
+ # C.g:386:2: ( '(' type_name ')' cast_expression | unary_expression )
+ alt63 = 2
+ LA63_0 = self.input.LA(1)
+
+ if (LA63_0 == 62) :
+ LA63 = self.input.LA(2)
+                if (34 <= LA63 <= 42) or (45 <= LA63 <= 46) or (48 <= LA63 <= 61):
+ alt63 = 1
+ elif LA63 == IDENTIFIER:
+ LA63_25 = self.input.LA(3)
+
+ if (self.synpred109()) :
+ alt63 = 1
+ elif (True) :
+ alt63 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 25, self.input)
+
+ raise nvae
+
+                elif LA63 == HEX_LITERAL or LA63 == OCTAL_LITERAL or LA63 == DECIMAL_LITERAL or LA63 == CHARACTER_LITERAL or LA63 == STRING_LITERAL or LA63 == FLOATING_POINT_LITERAL or LA63 == 62 or LA63 == 66 or (68 <= LA63 <= 69) or (72 <= LA63 <= 74) or (77 <= LA63 <= 79):
+ alt63 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)
+
+ raise nvae
+
+ elif ((IDENTIFIER <= LA63_0 <= FLOATING_POINT_LITERAL) or LA63_0 == 66 or (68 <= LA63_0 <= 69) or (72 <= LA63_0 <= 74) or (77 <= LA63_0 <= 79)) :
+ alt63 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)
+
+ raise nvae
+
+ if alt63 == 1:
+ # C.g:386:4: '(' type_name ')' cast_expression
+ self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1282)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_type_name_in_cast_expression1284)
+ self.type_name()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_cast_expression1286)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_cast_expression1288)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt63 == 2:
+ # C.g:387:4: unary_expression
+ self.following.append(self.FOLLOW_unary_expression_in_cast_expression1293)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 39, cast_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end cast_expression
+
+
+ # $ANTLR start unary_expression
+ # C.g:390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );
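+    # Note: 'sizeof' (token 74) followed by '(' (62) is ambiguous between
+    # 'sizeof' unary_expression and 'sizeof' '(' type_name ')'; synpred114
+    # backtracks to try the expression form first.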
+ def unary_expression(self, ):
+
+ unary_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
+ return
+
+ # C.g:391:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
+ alt64 = 6
+ LA64 = self.input.LA(1)
+            if (IDENTIFIER <= LA64 <= FLOATING_POINT_LITERAL) or LA64 == 62:
+ alt64 = 1
+ elif LA64 == 72:
+ alt64 = 2
+ elif LA64 == 73:
+ alt64 = 3
+            elif LA64 == 66 or (68 <= LA64 <= 69) or (77 <= LA64 <= 79):
+ alt64 = 4
+ elif LA64 == 74:
+ LA64_12 = self.input.LA(2)
+
+ if (LA64_12 == 62) :
+ LA64_13 = self.input.LA(3)
+
+ if (self.synpred114()) :
+ alt64 = 5
+ elif (True) :
+ alt64 = 6
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)
+
+ raise nvae
+
+ elif ((IDENTIFIER <= LA64_12 <= FLOATING_POINT_LITERAL) or LA64_12 == 66 or (68 <= LA64_12 <= 69) or (72 <= LA64_12 <= 74) or (77 <= LA64_12 <= 79)) :
+ alt64 = 5
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)
+
+ raise nvae
+
+ if alt64 == 1:
+ # C.g:391:4: postfix_expression
+ self.following.append(self.FOLLOW_postfix_expression_in_unary_expression1304)
+ self.postfix_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 2:
+ # C.g:392:4: '++' unary_expression
+ self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1309)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_unary_expression1311)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 3:
+ # C.g:393:4: '--' unary_expression
+ self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1316)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_unary_expression1318)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 4:
+ # C.g:394:4: unary_operator cast_expression
+ self.following.append(self.FOLLOW_unary_operator_in_unary_expression1323)
+ self.unary_operator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_unary_expression1325)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 5:
+ # C.g:395:4: 'sizeof' unary_expression
+ self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1330)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_unary_expression1332)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt64 == 6:
+ # C.g:396:4: 'sizeof' '(' type_name ')'
+ self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1337)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1339)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_type_name_in_unary_expression1341)
+ self.type_name()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_unary_expression1343)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 40, unary_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end unary_expression
+
+
+ # $ANTLR start postfix_expression
+ # C.g:399:1: postfix_expression : p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* ;
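+    # Note: beyond matching postfix operators, this rule feeds the tool's
+    # function-call tracking: FuncCallText accumulates the callee text
+    # (including '.', '->' and '*' member accesses) and StoreFunctionCalling
+    # records each call site together with its argument text.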
+ def postfix_expression(self, ):
+ self.postfix_expression_stack.append(postfix_expression_scope())
+ postfix_expression_StartIndex = self.input.index()
+ a = None
+ b = None
+ x = None
+ y = None
+ z = None
+ p = None
+
+ c = None
+
+
+
+ self.postfix_expression_stack[-1].FuncCallText = ''
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
+ return
+
+ # C.g:406:2: (p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* )
+ # C.g:406:6: p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
+ self.following.append(self.FOLLOW_primary_expression_in_postfix_expression1367)
+ p = self.primary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
+
+ # C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
+ while True: #loop65
+ alt65 = 10
+ LA65 = self.input.LA(1)
+ if LA65 == 66:
+ LA65_1 = self.input.LA(2)
+
+ if (LA65_1 == IDENTIFIER) :
+ LA65_30 = self.input.LA(3)
+
+ if (self.synpred120()) :
+ alt65 = 6
+
+
+
+
+ elif LA65 == 64:
+ alt65 = 1
+ elif LA65 == 62:
+ LA65 = self.input.LA(2)
+ if LA65 == 63:
+ alt65 = 2
+                elif (29 <= LA65 <= 42) or (45 <= LA65 <= 46) or (48 <= LA65 <= 61):
+ alt65 = 4
+ elif LA65 == IDENTIFIER:
+ LA65_55 = self.input.LA(3)
+
+ if (self.synpred117()) :
+ alt65 = 3
+ elif (self.synpred118()) :
+ alt65 = 4
+
+
+ elif LA65 == 66:
+ LA65_57 = self.input.LA(3)
+
+ if (self.synpred117()) :
+ alt65 = 3
+ elif (self.synpred118()) :
+ alt65 = 4
+
+
+                elif LA65 == HEX_LITERAL or LA65 == OCTAL_LITERAL or LA65 == DECIMAL_LITERAL or LA65 == CHARACTER_LITERAL or LA65 == STRING_LITERAL or LA65 == FLOATING_POINT_LITERAL or LA65 == 62 or (68 <= LA65 <= 69) or (72 <= LA65 <= 74) or (77 <= LA65 <= 79):
+ alt65 = 3
+
+ elif LA65 == 75:
+ alt65 = 5
+ elif LA65 == 76:
+ alt65 = 7
+ elif LA65 == 72:
+ alt65 = 8
+ elif LA65 == 73:
+ alt65 = 9
+
+ if alt65 == 1:
+ # C.g:407:13: '[' expression ']'
+ self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1383)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_postfix_expression1385)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1387)
+ if self.failed:
+ return
+
+
+ elif alt65 == 2:
+ # C.g:408:13: '(' a= ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1401)
+ if self.failed:
+ return
+ a = self.input.LT(1)
+ self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1405)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')
+
+
+
+ elif alt65 == 3:
+ # C.g:409:13: '(' c= argument_expression_list b= ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1420)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1424)
+ c = self.argument_expression_list()
+ self.following.pop()
+ if self.failed:
+ return
+ b = self.input.LT(1)
+ self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1428)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
+
+
+
+ elif alt65 == 4:
+ # C.g:410:13: '(' macro_parameter_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1444)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1446)
+ self.macro_parameter_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1448)
+ if self.failed:
+ return
+
+
+ elif alt65 == 5:
+ # C.g:411:13: '.' x= IDENTIFIER
+ self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1462)
+ if self.failed:
+ return
+ x = self.input.LT(1)
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1466)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText += '.' + x.text
+
+
+
+ elif alt65 == 6:
+ # C.g:412:13: '*' y= IDENTIFIER
+ self.match(self.input, 66, self.FOLLOW_66_in_postfix_expression1482)
+ if self.failed:
+ return
+ y = self.input.LT(1)
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1486)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText = y.text
+
+
+
+ elif alt65 == 7:
+ # C.g:413:13: '->' z= IDENTIFIER
+ self.match(self.input, 76, self.FOLLOW_76_in_postfix_expression1502)
+ if self.failed:
+ return
+ z = self.input.LT(1)
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1506)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.postfix_expression_stack[-1].FuncCallText += '->' + z.text
+
+
+
+ elif alt65 == 8:
+ # C.g:414:13: '++'
+ self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1522)
+ if self.failed:
+ return
+
+
+ elif alt65 == 9:
+ # C.g:415:13: '--'
+ self.match(self.input, 73, self.FOLLOW_73_in_postfix_expression1536)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop65
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 41, postfix_expression_StartIndex)
+
+ self.postfix_expression_stack.pop()
+ pass
+
+ return
+
+ # $ANTLR end postfix_expression
+
+
+ # $ANTLR start macro_parameter_list
+ # C.g:419:1: macro_parameter_list : parameter_declaration ( ',' parameter_declaration )* ;
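+    # Note: a comma-separated list of parameter_declarations, used when a
+    # call-like construct is parsed as a macro invocation (alt 4 of
+    # postfix_expression above).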
+ def macro_parameter_list(self, ):
+
+ macro_parameter_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
+ return
+
+ # C.g:420:2: ( parameter_declaration ( ',' parameter_declaration )* )
+ # C.g:420:4: parameter_declaration ( ',' parameter_declaration )*
+ self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1559)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:420:26: ( ',' parameter_declaration )*
+ while True: #loop66
+ alt66 = 2
+ LA66_0 = self.input.LA(1)
+
+ if (LA66_0 == 27) :
+ alt66 = 1
+
+
+ if alt66 == 1:
+ # C.g:420:27: ',' parameter_declaration
+ self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1562)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1564)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop66
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 42, macro_parameter_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end macro_parameter_list
+
+
+ # $ANTLR start unary_operator
+ # C.g:423:1: unary_operator : ( '&' | '*' | '+' | '-' | '~' | '!' );
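+    # Note: matched as a single token-set test (66, 68-69, 77-79) covering
+    # '&' '*' '+' '-' '~' '!', rather than as six separate alternatives.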
+ def unary_operator(self, ):
+
+ unary_operator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
+ return
+
+ # C.g:424:2: ( '&' | '*' | '+' | '-' | '~' | '!' )
+ # C.g:
+ if self.input.LA(1) == 66 or (68 <= self.input.LA(1) <= 69) or (77 <= self.input.LA(1) <= 79):
+                self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_unary_operator0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 43, unary_operator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end unary_operator
+
+ class primary_expression_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start primary_expression
+ # C.g:432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );
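+    # Note: an IDENTIFIER followed by another IDENTIFIER or a STRING_LITERAL
+    # is routed to the constant rule, which handles concatenated and
+    # macro-prefixed string literals.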
+ def primary_expression(self, ):
+
+ retval = self.primary_expression_return()
+ retval.start = self.input.LT(1)
+ primary_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 44):
+ return retval
+
+ # C.g:433:2: ( IDENTIFIER | constant | '(' expression ')' )
+ alt67 = 3
+ LA67 = self.input.LA(1)
+ if LA67 == IDENTIFIER:
+ LA67_1 = self.input.LA(2)
+
+ if (LA67_1 == EOF or LA67_1 == 25 or (27 <= LA67_1 <= 28) or LA67_1 == 44 or LA67_1 == 47 or LA67_1 == 53 or (62 <= LA67_1 <= 66) or (68 <= LA67_1 <= 73) or (75 <= LA67_1 <= 77) or (80 <= LA67_1 <= 102)) :
+ alt67 = 1
+ elif (LA67_1 == IDENTIFIER or LA67_1 == STRING_LITERAL) :
+ alt67 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 1, self.input)
+
+ raise nvae
+
+ elif LA67 == HEX_LITERAL or LA67 == OCTAL_LITERAL or LA67 == DECIMAL_LITERAL or LA67 == CHARACTER_LITERAL or LA67 == STRING_LITERAL or LA67 == FLOATING_POINT_LITERAL:
+ alt67 = 2
+ elif LA67 == 62:
+ alt67 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 0, self.input)
+
+ raise nvae
+
+ if alt67 == 1:
+ # C.g:433:4: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_primary_expression1613)
+ if self.failed:
+ return retval
+
+
+ elif alt67 == 2:
+ # C.g:434:4: constant
+ self.following.append(self.FOLLOW_constant_in_primary_expression1618)
+ self.constant()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ elif alt67 == 3:
+ # C.g:435:4: '(' expression ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_primary_expression1623)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_expression_in_primary_expression1625)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, 63, self.FOLLOW_63_in_primary_expression1627)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 44, primary_expression_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end primary_expression
+
+
+ # $ANTLR start constant
+ # C.g:438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );
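+    # Note: alt 5 accepts runs of IDENTIFIERs mixed with STRING_LITERALs so
+    # that C string concatenation, and macro names standing directly before a
+    # literal, still parse as a single constant.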
+ def constant(self, ):
+
+ constant_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45):
+ return
+
+ # C.g:439:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL )
+ alt72 = 6
+ LA72 = self.input.LA(1)
+ if LA72 == HEX_LITERAL:
+ alt72 = 1
+ elif LA72 == OCTAL_LITERAL:
+ alt72 = 2
+ elif LA72 == DECIMAL_LITERAL:
+ alt72 = 3
+ elif LA72 == CHARACTER_LITERAL:
+ alt72 = 4
+ elif LA72 == IDENTIFIER or LA72 == STRING_LITERAL:
+ alt72 = 5
+ elif LA72 == FLOATING_POINT_LITERAL:
+ alt72 = 6
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input)
+
+ raise nvae
+
+ if alt72 == 1:
+ # C.g:439:9: HEX_LITERAL
+ self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1643)
+ if self.failed:
+ return
+
+
+ elif alt72 == 2:
+ # C.g:440:9: OCTAL_LITERAL
+ self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1653)
+ if self.failed:
+ return
+
+
+ elif alt72 == 3:
+ # C.g:441:9: DECIMAL_LITERAL
+ self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1663)
+ if self.failed:
+ return
+
+
+ elif alt72 == 4:
+ # C.g:442:7: CHARACTER_LITERAL
+ self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1671)
+ if self.failed:
+ return
+
+
+ elif alt72 == 5:
+ # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )*
+ # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+
+ cnt70 = 0
+ while True: #loop70
+ alt70 = 2
+ LA70_0 = self.input.LA(1)
+
+ if (LA70_0 == IDENTIFIER) :
+ LA70_1 = self.input.LA(2)
+
+ if (LA70_1 == STRING_LITERAL) :
+ alt70 = 1
+ elif (LA70_1 == IDENTIFIER) :
+ LA70_33 = self.input.LA(3)
+
+ if (self.synpred138()) :
+ alt70 = 1
+
+
+
+
+ elif (LA70_0 == STRING_LITERAL) :
+ alt70 = 1
+
+
+ if alt70 == 1:
+ # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
+ # C.g:443:8: ( IDENTIFIER )*
+ while True: #loop68
+ alt68 = 2
+ LA68_0 = self.input.LA(1)
+
+ if (LA68_0 == IDENTIFIER) :
+ alt68 = 1
+
+
+ if alt68 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1680)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop68
+
+
+ # C.g:443:20: ( STRING_LITERAL )+
+ cnt69 = 0
+ while True: #loop69
+ alt69 = 2
+ LA69_0 = self.input.LA(1)
+
+ if (LA69_0 == STRING_LITERAL) :
+ LA69_31 = self.input.LA(2)
+
+ if (self.synpred137()) :
+ alt69 = 1
+
+
+
+
+ if alt69 == 1:
+ # C.g:0:0: STRING_LITERAL
+ self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1683)
+ if self.failed:
+ return
+
+
+ else:
+ if cnt69 >= 1:
+ break #loop69
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(69, self.input)
+ raise eee
+
+ cnt69 += 1
+
+
+
+
+ else:
+ if cnt70 >= 1:
+ break #loop70
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(70, self.input)
+ raise eee
+
+ cnt70 += 1
+
+
+ # C.g:443:38: ( IDENTIFIER )*
+ while True: #loop71
+ alt71 = 2
+ LA71_0 = self.input.LA(1)
+
+ if (LA71_0 == IDENTIFIER) :
+ alt71 = 1
+
+
+ if alt71 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1688)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop71
+
+
+
+
+ elif alt72 == 6:
+ # C.g:444:9: FLOATING_POINT_LITERAL
+ self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1699)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 45, constant_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end constant
+
+ class expression_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start expression
+ # C.g:449:1: expression : assignment_expression ( ',' assignment_expression )* ;
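+    # Note: a comma expression: one or more assignment_expressions separated
+    # by ',' (token 27). The start/stop tokens are captured in the return
+    # object so callers can recover the matched source text.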
+ def expression(self, ):
+
+ retval = self.expression_return()
+ retval.start = self.input.LT(1)
+ expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 46):
+ return retval
+
+ # C.g:450:2: ( assignment_expression ( ',' assignment_expression )* )
+ # C.g:450:4: assignment_expression ( ',' assignment_expression )*
+ self.following.append(self.FOLLOW_assignment_expression_in_expression1715)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:450:26: ( ',' assignment_expression )*
+ while True: #loop73
+ alt73 = 2
+ LA73_0 = self.input.LA(1)
+
+ if (LA73_0 == 27) :
+ alt73 = 1
+
+
+ if alt73 == 1:
+ # C.g:450:27: ',' assignment_expression
+ self.match(self.input, 27, self.FOLLOW_27_in_expression1718)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_assignment_expression_in_expression1720)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop73
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 46, expression_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end expression
+
+
+ # $ANTLR start constant_expression
+ # C.g:453:1: constant_expression : conditional_expression ;
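+    # Note: a thin wrapper over conditional_expression, mirroring the C
+    # grammar's constant-expression production.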
+ def constant_expression(self, ):
+
+ constant_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47):
+ return
+
+ # C.g:454:2: ( conditional_expression )
+ # C.g:454:4: conditional_expression
+ self.following.append(self.FOLLOW_conditional_expression_in_constant_expression1733)
+ self.conditional_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 47, constant_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end constant_expression
+
+
+ # $ANTLR start assignment_expression
+ # C.g:457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );
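+    # Note: the generated DFA below enumerates every possible first token and,
+    # where lookahead alone is ambiguous, consults synpred142 via backtracking
+    # to choose between the lvalue-assignment and conditional_expression
+    # alternatives; hence the long, repetitive case analysis.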
+ def assignment_expression(self, ):
+
+ assignment_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48):
+ return
+
+ # C.g:458:2: ( lvalue assignment_operator assignment_expression | conditional_expression )
+ alt74 = 2
+ LA74 = self.input.LA(1)
+ if LA74 == IDENTIFIER:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_13 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_14 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_15 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_16 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_17 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_18 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_19 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input)
+
+ raise nvae
+
+                elif LA74 == 28 or (80 <= LA74 <= 89):
+ alt74 = 1
+ elif LA74 == STRING_LITERAL:
+ LA74_21 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input)
+
+ raise nvae
+
+ elif LA74 == IDENTIFIER:
+ LA74_22 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 22, self.input)
+
+ raise nvae
+
+                elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or (68 <= LA74 <= 71) or LA74 == 77 or (90 <= LA74 <= 102):
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_44 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_45 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_46 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_47 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 47, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_48 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_49 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_50 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input)
+
+ raise nvae
+
+                elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or (68 <= LA74 <= 71) or LA74 == 77 or (90 <= LA74 <= 102):
+ alt74 = 2
+                elif LA74 == 28 or (80 <= LA74 <= 89):
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_73 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_74 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_75 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_76 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_77 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_78 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_79 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input)
+
+ raise nvae
+
+                elif LA74 == 28 or (80 <= LA74 <= 89):
+ alt74 = 1
+                elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or (68 <= LA74 <= 71) or LA74 == 77 or (90 <= LA74 <= 102):
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_102 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_103 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_104 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_105 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_106 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_107 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_108 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input)
+
+ raise nvae
+
+                elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or (68 <= LA74 <= 71) or LA74 == 77 or (90 <= LA74 <= 102):
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_131 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_132 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_133 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_134 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_135 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_136 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_137 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_160 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input)
+
+ raise nvae
+
+ elif LA74 == 64:
+ LA74_161 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_162 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_163 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_164 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_165 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_166 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_167 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == STRING_LITERAL:
+ LA74_189 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 189, self.input)
+
+ raise nvae
+
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74 = self.input.LA(2)
+ if LA74 == 64:
+ LA74_191 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_192 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input)
+
+ raise nvae
+
+ elif LA74 == 75:
+ LA74_193 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input)
+
+ raise nvae
+
+ elif LA74 == 66:
+ LA74_194 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input)
+
+ raise nvae
+
+ elif LA74 == 76:
+ LA74_195 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_196 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_197 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input)
+
+ raise nvae
+
+ elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
+ alt74 = 2
+ elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
+ alt74 = 1
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_220 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_221 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_222 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_223 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_224 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_225 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_226 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_227 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_228 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_229 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_230 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_231 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input)
+
+ raise nvae
+
+ elif LA74 == 34 or LA74 == 35 or LA74 == 36 or LA74 == 37 or LA74 == 38 or LA74 == 39 or LA74 == 40 or LA74 == 41 or LA74 == 42 or LA74 == 45 or LA74 == 46 or LA74 == 48 or LA74 == 49 or LA74 == 50 or LA74 == 51 or LA74 == 52 or LA74 == 53 or LA74 == 54 or LA74 == 55 or LA74 == 56 or LA74 == 57 or LA74 == 58 or LA74 == 59 or LA74 == 60 or LA74 == 61:
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_244 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_245 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_246 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_247 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_248 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_249 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_250 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_251 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_252 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_253 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_254 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_255 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74 = self.input.LA(2)
+ if LA74 == IDENTIFIER:
+ LA74_256 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_257 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_258 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_259 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_260 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_261 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_262 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input)
+
+ raise nvae
+
+ elif LA74 == 62:
+ LA74_263 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_264 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_265 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_266 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_267 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74 = self.input.LA(2)
+ if LA74 == 62:
+ LA74_268 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input)
+
+ raise nvae
+
+ elif LA74 == IDENTIFIER:
+ LA74_269 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_270 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 270, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_271 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_272 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_273 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_274 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_275 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_276 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_277 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_278 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_279 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74 = self.input.LA(2)
+ if LA74 == 62:
+ LA74_280 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input)
+
+ raise nvae
+
+ elif LA74 == IDENTIFIER:
+ LA74_281 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input)
+
+ raise nvae
+
+ elif LA74 == HEX_LITERAL:
+ LA74_282 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input)
+
+ raise nvae
+
+ elif LA74 == OCTAL_LITERAL:
+ LA74_283 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input)
+
+ raise nvae
+
+ elif LA74 == DECIMAL_LITERAL:
+ LA74_284 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input)
+
+ raise nvae
+
+ elif LA74 == CHARACTER_LITERAL:
+ LA74_285 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 285, self.input)
+
+ raise nvae
+
+ elif LA74 == STRING_LITERAL:
+ LA74_286 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input)
+
+ raise nvae
+
+ elif LA74 == FLOATING_POINT_LITERAL:
+ LA74_287 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input)
+
+ raise nvae
+
+ elif LA74 == 72:
+ LA74_288 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input)
+
+ raise nvae
+
+ elif LA74 == 73:
+ LA74_289 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input)
+
+ raise nvae
+
+ elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
+ LA74_290 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, self.input)
+
+ raise nvae
+
+ elif LA74 == 74:
+ LA74_291 = self.input.LA(3)
+
+ if (self.synpred142()) :
+ alt74 = 1
+ elif (True) :
+ alt74 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input)
+
+ raise nvae
+
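+ # The cascade above is the generated lookahead decision for this rule:
+ # up to three tokens are inspected and, where the two alternatives stay
+ # ambiguous, the syntactic predicate synpred142() parses speculatively,
+ # choosing alt 1 (lvalue assignment_operator assignment_expression) on
+ # success and alt 2 (conditional_expression) otherwise. While
+ # backtracking, a dead end only sets self.failed; in a real parse it
+ # raises NoViableAltException.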
+ if alt74 == 1:
+ # C.g:458:4: lvalue assignment_operator assignment_expression
+ self.following.append(self.FOLLOW_lvalue_in_assignment_expression1744)
+ self.lvalue()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1746)
+ self.assignment_operator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1748)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt74 == 2:
+ # C.g:459:4: conditional_expression
+ self.following.append(self.FOLLOW_conditional_expression_in_assignment_expression1753)
+ self.conditional_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 48, assignment_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end assignment_expression
+
+
+ # $ANTLR start lvalue
+ # C.g:462:1: lvalue : unary_expression ;
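+ # Thin wrapper over unary_expression. Like every rule here, it pushes
+ # its FOLLOW set onto self.following for error recovery and, while
+ # backtracking, memoizes success/failure per input position so
+ # speculative parses stay linear.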
+ def lvalue(self):
+
+ lvalue_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49):
+ return
+
+ # C.g:463:2: ( unary_expression )
+ # C.g:463:4: unary_expression
+ self.following.append(self.FOLLOW_unary_expression_in_lvalue1765)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 49, lvalue_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end lvalue
+
+
+ # $ANTLR start assignment_operator
+ # C.g:466:1: assignment_operator : ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' );
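+ # All eleven assignment operators are matched with a single token-set
+ # test (token 28 is '=', tokens 80-89 the compound forms), so no
+ # alternative prediction is needed.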
+ def assignment_operator(self):
+
+ assignment_operator_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50):
+ return
+
+ # C.g:467:2: ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' )
+ # C.g:
+ if self.input.LA(1) == 28 or (80 <= self.input.LA(1) <= 89):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_assignment_operator0
+ )
+ raise mse
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 50, assignment_operator_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end assignment_operator
+
+
+ # $ANTLR start conditional_expression
+ # C.g:480:1: conditional_expression : e= logical_or_expression ( '?' expression ':' conditional_expression )? ;
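+ # Ternary operator: a logical_or_expression optionally followed by
+ # '?' expression ':' conditional_expression (tokens 90 and 47). When
+ # not backtracking, StorePredicateExpression() records the condition's
+ # line/column span and source text, presumably for later analysis of
+ # predicate expressions.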
+ def conditional_expression(self):
+
+ conditional_expression_StartIndex = self.input.index()
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51):
+ return
+
+ # C.g:481:2: (e= logical_or_expression ( '?' expression ':' conditional_expression )? )
+ # C.g:481:4: e= logical_or_expression ( '?' expression ':' conditional_expression )?
+ self.following.append(self.FOLLOW_logical_or_expression_in_conditional_expression1839)
+ e = self.logical_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:481:28: ( '?' expression ':' conditional_expression )?
+ alt75 = 2
+ LA75_0 = self.input.LA(1)
+
+ if (LA75_0 == 90) :
+ alt75 = 1
+ if alt75 == 1:
+ # C.g:481:29: '?' expression ':' conditional_expression
+ self.match(self.input, 90, self.FOLLOW_90_in_conditional_expression1842)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_conditional_expression1844)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1846)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1848)
+ self.conditional_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 51, conditional_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end conditional_expression
+
+ class logical_or_expression_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start logical_or_expression
+ # C.g:484:1: logical_or_expression : logical_and_expression ( '||' logical_and_expression )* ;
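+ # Unlike the other expression rules, this one returns an object carrying
+ # its start/stop tokens, which lets conditional_expression above extract
+ # the condition's exact source text with input.toString().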
+ def logical_or_expression(self):
+
+ retval = self.logical_or_expression_return()
+ retval.start = self.input.LT(1)
+ logical_or_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 52):
+ return retval
+
+ # C.g:485:2: ( logical_and_expression ( '||' logical_and_expression )* )
+ # C.g:485:4: logical_and_expression ( '||' logical_and_expression )*
+ self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1863)
+ self.logical_and_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ # C.g:485:27: ( '||' logical_and_expression )*
+ while True: #loop76
+ alt76 = 2
+ LA76_0 = self.input.LA(1)
+
+ if (LA76_0 == 91) :
+ alt76 = 1
+
+
+ if alt76 == 1:
+ # C.g:485:28: '||' logical_and_expression
+ self.match(self.input, 91, self.FOLLOW_91_in_logical_or_expression1866)
+ if self.failed:
+ return retval
+ self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1868)
+ self.logical_and_expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop76
+
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 52, logical_or_expression_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end logical_or_expression
+
+
+ # $ANTLR start logical_and_expression
+ # C.g:488:1: logical_and_expression : inclusive_or_expression ( '&&' inclusive_or_expression )* ;
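+ # This rule and the precedence levels below it share one shape: parse
+ # the next tighter-binding expression once, then loop while LA(1) is
+ # this level's operator ('&&' here, token 92), giving left-associative
+ # chains without left recursion.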
+ def logical_and_expression(self):
+
+ logical_and_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53):
+ return
+
+ # C.g:489:2: ( inclusive_or_expression ( '&&' inclusive_or_expression )* )
+ # C.g:489:4: inclusive_or_expression ( '&&' inclusive_or_expression )*
+ self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1881)
+ self.inclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:489:28: ( '&&' inclusive_or_expression )*
+ while True: #loop77
+ alt77 = 2
+ LA77_0 = self.input.LA(1)
+
+ if (LA77_0 == 92) :
+ alt77 = 1
+
+
+ if alt77 == 1:
+ # C.g:489:29: '&&' inclusive_or_expression
+ self.match(self.input, 92, self.FOLLOW_92_in_logical_and_expression1884)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1886)
+ self.inclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop77
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 53, logical_and_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end logical_and_expression
+
+
+ # $ANTLR start inclusive_or_expression
+ # C.g:492:1: inclusive_or_expression : exclusive_or_expression ( '|' exclusive_or_expression )* ;
+ def inclusive_or_expression(self):
+
+ inclusive_or_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54):
+ return
+
+ # C.g:493:2: ( exclusive_or_expression ( '|' exclusive_or_expression )* )
+ # C.g:493:4: exclusive_or_expression ( '|' exclusive_or_expression )*
+ self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899)
+ self.exclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:493:28: ( '|' exclusive_or_expression )*
+ while True: #loop78
+ alt78 = 2
+ LA78_0 = self.input.LA(1)
+
+ if (LA78_0 == 93) :
+ alt78 = 1
+
+
+ if alt78 == 1:
+ # C.g:493:29: '|' exclusive_or_expression
+ self.match(self.input, 93, self.FOLLOW_93_in_inclusive_or_expression1902)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904)
+ self.exclusive_or_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop78
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 54, inclusive_or_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end inclusive_or_expression
+
+
+ # $ANTLR start exclusive_or_expression
+ # C.g:496:1: exclusive_or_expression : and_expression ( '^' and_expression )* ;
+ def exclusive_or_expression(self):
+
+ exclusive_or_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55):
+ return
+
+ # C.g:497:2: ( and_expression ( '^' and_expression )* )
+ # C.g:497:4: and_expression ( '^' and_expression )*
+ self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1917)
+ self.and_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:497:19: ( '^' and_expression )*
+ while True: #loop79
+ alt79 = 2
+ LA79_0 = self.input.LA(1)
+
+ if (LA79_0 == 94) :
+ alt79 = 1
+
+
+ if alt79 == 1:
+ # C.g:497:20: '^' and_expression
+ self.match(self.input, 94, self.FOLLOW_94_in_exclusive_or_expression1920)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1922)
+ self.and_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop79
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 55, exclusive_or_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end exclusive_or_expression
+
+
+ # $ANTLR start and_expression
+ # C.g:500:1: and_expression : equality_expression ( '&' equality_expression )* ;
+ def and_expression(self):
+
+ and_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56):
+ return
+
+ # C.g:501:2: ( equality_expression ( '&' equality_expression )* )
+ # C.g:501:4: equality_expression ( '&' equality_expression )*
+ self.following.append(self.FOLLOW_equality_expression_in_and_expression1935)
+ self.equality_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:501:24: ( '&' equality_expression )*
+ while True: #loop80
+ alt80 = 2
+ LA80_0 = self.input.LA(1)
+
+ if (LA80_0 == 77) :
+ alt80 = 1
+
+
+ if alt80 == 1:
+ # C.g:501:25: '&' equality_expression
+ self.match(self.input, 77, self.FOLLOW_77_in_and_expression1938)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_equality_expression_in_and_expression1940)
+ self.equality_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop80
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 56, and_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end and_expression
+
+
+ # $ANTLR start equality_expression
+ # C.g:503:1: equality_expression : relational_expression ( ( '==' | '!=' ) relational_expression )* ;
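+ # '==' and '!=' arrive as the contiguous token range 95..96, so the
+ # operator is consumed by a set match; a token outside the set triggers
+ # recoverFromMismatchedSet() (or merely sets self.failed while
+ # backtracking).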
+ def equality_expression(self):
+
+ equality_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57):
+ return
+
+ # C.g:504:2: ( relational_expression ( ( '==' | '!=' ) relational_expression )* )
+ # C.g:504:4: relational_expression ( ( '==' | '!=' ) relational_expression )*
+ self.following.append(self.FOLLOW_relational_expression_in_equality_expression1952)
+ self.relational_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:504:26: ( ( '==' | '!=' ) relational_expression )*
+ while True: #loop81
+ alt81 = 2
+ LA81_0 = self.input.LA(1)
+
+ if ((95 <= LA81_0 <= 96)) :
+ alt81 = 1
+
+
+ if alt81 == 1:
+ # C.g:504:27: ( '==' | '!=' ) relational_expression
+ if (95 <= self.input.LA(1) <= 96):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_equality_expression1955
+ )
+ raise mse
+
+
+ self.following.append(self.FOLLOW_relational_expression_in_equality_expression1961)
+ self.relational_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop81
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 57, equality_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end equality_expression
+
+
+ # $ANTLR start relational_expression
+ # C.g:507:1: relational_expression : shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ;
+ def relational_expression(self):
+
+ relational_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58):
+ return
+
+ # C.g:508:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* )
+ # C.g:508:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
+ self.following.append(self.FOLLOW_shift_expression_in_relational_expression1975)
+ self.shift_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:508:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
+ while True: #loop82
+ alt82 = 2
+ LA82_0 = self.input.LA(1)
+
+ if ((97 <= LA82_0 <= 100)) :
+ alt82 = 1
+
+
+ if alt82 == 1:
+ # C.g:508:22: ( '<' | '>' | '<=' | '>=' ) shift_expression
+ if (97 <= self.input.LA(1) <= 100):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_relational_expression1978
+ )
+ raise mse
+
+
+ self.following.append(self.FOLLOW_shift_expression_in_relational_expression1988)
+ self.shift_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop82
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 58, relational_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end relational_expression
+
+
+ # $ANTLR start shift_expression
+ # C.g:511:1: shift_expression : additive_expression ( ( '<<' | '>>' ) additive_expression )* ;
+ def shift_expression(self):
+
+ shift_expression_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59):
+ return
+
+ # C.g:512:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* )
+ # C.g:512:4: additive_expression ( ( '<<' | '>>' ) additive_expression )*
+ self.following.append(self.FOLLOW_additive_expression_in_shift_expression2001)
+ self.additive_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:512:24: ( ( '<<' | '>>' ) additive_expression )*
+ while True: #loop83
+ alt83 = 2
+ LA83_0 = self.input.LA(1)
+
+ if ((101 <= LA83_0 <= 102)) :
+ alt83 = 1
+
+
+ if alt83 == 1:
+ # C.g:512:25: ( '<<' | '>>' ) additive_expression
+ if (101 <= self.input.LA(1) <= 102):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_shift_expression2004
+ )
+ raise mse
+
+
+ self.following.append(self.FOLLOW_additive_expression_in_shift_expression2010)
+ self.additive_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop83
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 59, shift_expression_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end shift_expression
+
+
+ # $ANTLR start statement
+ # C.g:517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );
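+ # Eleven-way dispatch over statement kinds. Most cases need one token
+ # of lookahead; a statement beginning IDENTIFIER followed by token 62
+ # (evidently '(') is ambiguous, so synpred169/synpred173/synpred174 try
+ # expression_statement, macro_statement and asm2_statement in turn,
+ # falling back to declaration (alt 11).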
+ def statement(self):
+
+ statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60):
+ return
+
+ # C.g:518:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration )
+ alt84 = 11
+ LA84 = self.input.LA(1)
+ if LA84 == IDENTIFIER:
+ LA84 = self.input.LA(2)
+ if LA84 == 62:
+ LA84_43 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (self.synpred173()) :
+ alt84 = 7
+ elif (self.synpred174()) :
+ alt84 = 8
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 43, self.input)
+
+ raise nvae
+
+ elif LA84 == 47:
+ alt84 = 1
+ elif LA84 == STRING_LITERAL or LA84 == 27 or LA84 == 28 or LA84 == 64 or LA84 == 68 or LA84 == 69 or LA84 == 70 or LA84 == 71 or LA84 == 72 or LA84 == 73 or LA84 == 75 or LA84 == 76 or LA84 == 77 or LA84 == 80 or LA84 == 81 or LA84 == 82 or LA84 == 83 or LA84 == 84 or LA84 == 85 or LA84 == 86 or LA84 == 87 or LA84 == 88 or LA84 == 89 or LA84 == 90 or LA84 == 91 or LA84 == 92 or LA84 == 93 or LA84 == 94 or LA84 == 95 or LA84 == 96 or LA84 == 97 or LA84 == 98 or LA84 == 99 or LA84 == 100 or LA84 == 101 or LA84 == 102:
+ alt84 = 3
+ elif LA84 == 66:
+ LA84_47 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input)
+
+ raise nvae
+
+ elif LA84 == IDENTIFIER:
+ LA84_53 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 53, self.input)
+
+ raise nvae
+
+ elif LA84 == 25:
+ LA84_68 = self.input.LA(3)
+
+ if (self.synpred169()) :
+ alt84 = 3
+ elif (True) :
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 68, self.input)
+
+ raise nvae
+
+ elif LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60 or LA84 == 61:
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input)
+
+ raise nvae
+
+ elif LA84 == 106 or LA84 == 107:
+ alt84 = 1
+ elif LA84 == 43:
+ alt84 = 2
+ elif LA84 == HEX_LITERAL or LA84 == OCTAL_LITERAL or LA84 == DECIMAL_LITERAL or LA84 == CHARACTER_LITERAL or LA84 == STRING_LITERAL or LA84 == FLOATING_POINT_LITERAL or LA84 == 25 or LA84 == 62 or LA84 == 66 or 68 <= LA84 <= 69 or 72 <= LA84 <= 74 or 77 <= LA84 <= 79:
+ alt84 = 3
+ elif LA84 == 108 or LA84 == 110:
+ alt84 = 4
+ elif LA84 == 111 or LA84 == 112 or LA84 == 113:
+ alt84 = 5
+ elif LA84 == 114 or LA84 == 115 or LA84 == 116 or LA84 == 117:
+ alt84 = 6
+ elif LA84 == 103:
+ alt84 = 8
+ elif LA84 == 104:
+ alt84 = 9
+ elif LA84 == 105:
+ alt84 = 10
+ elif LA84 == 26 or 29 <= LA84 <= 42 or 45 <= LA84 <= 46 or 48 <= LA84 <= 61:
+ alt84 = 11
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input)
+
+ raise nvae
+
+ if alt84 == 1:
+ # C.g:518:4: labeled_statement
+ self.following.append(self.FOLLOW_labeled_statement_in_statement2025)
+ self.labeled_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 2:
+ # C.g:519:4: compound_statement
+ self.following.append(self.FOLLOW_compound_statement_in_statement2030)
+ self.compound_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 3:
+ # C.g:520:4: expression_statement
+ self.following.append(self.FOLLOW_expression_statement_in_statement2035)
+ self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 4:
+ # C.g:521:4: selection_statement
+ self.following.append(self.FOLLOW_selection_statement_in_statement2040)
+ self.selection_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 5:
+ # C.g:522:4: iteration_statement
+ self.following.append(self.FOLLOW_iteration_statement_in_statement2045)
+ self.iteration_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 6:
+ # C.g:523:4: jump_statement
+ self.following.append(self.FOLLOW_jump_statement_in_statement2050)
+ self.jump_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 7:
+ # C.g:524:4: macro_statement
+ self.following.append(self.FOLLOW_macro_statement_in_statement2055)
+ self.macro_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 8:
+ # C.g:525:4: asm2_statement
+ self.following.append(self.FOLLOW_asm2_statement_in_statement2060)
+ self.asm2_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 9:
+ # C.g:526:4: asm1_statement
+ self.following.append(self.FOLLOW_asm1_statement_in_statement2065)
+ self.asm1_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 10:
+ # C.g:527:4: asm_statement
+ self.following.append(self.FOLLOW_asm_statement_in_statement2070)
+ self.asm_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt84 == 11:
+ # C.g:528:4: declaration
+ self.following.append(self.FOLLOW_declaration_in_statement2075)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 60, statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end statement
+
+
+ # $ANTLR start asm2_statement
+ # C.g:531:1: asm2_statement : ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' ;
+ def asm2_statement(self):
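+ # Matches GCC-style assembly wrappers: an optional '__asm__' keyword
+ # (token 103), an IDENTIFIER, '(' (token 62), a run of arbitrary tokens
+ # other than ';', the closing ')' (token 63), and a terminating ';'
+ # (token 25). Hypothetical input this rule would accept:
+ #     __asm__ _ReadWriteBarrier ( ) ;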
+
+ asm2_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
+ return
+
+ # C.g:532:2: ( ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' )
+ # C.g:532:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
+ # C.g:532:4: ( '__asm__' )?
+ alt85 = 2
+ LA85_0 = self.input.LA(1)
+
+ if (LA85_0 == 103) :
+ alt85 = 1
+ if alt85 == 1:
+ # C.g:0:0: '__asm__'
+ self.match(self.input, 103, self.FOLLOW_103_in_asm2_statement2086)
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2089)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2091)
+ if self.failed:
+ return
+ # C.g:532:30: (~ ( ';' ) )*
+ while True: #loop86
+ alt86 = 2
+ LA86_0 = self.input.LA(1)
+
+ if (LA86_0 == 63) :
+ LA86_1 = self.input.LA(2)
+
+ if ((IDENTIFIER <= LA86_1 <= LINE_COMMAND) or (26 <= LA86_1 <= 117)) :
+ alt86 = 1
+
+
+ elif ((IDENTIFIER <= LA86_0 <= LINE_COMMAND) or (26 <= LA86_0 <= 62) or (64 <= LA86_0 <= 117)) :
+ alt86 = 1
+
+
+ if alt86 == 1:
+ # C.g:532:31: ~ ( ';' )
+ if (IDENTIFIER <= self.input.LA(1) <= LINE_COMMAND) or (26 <= self.input.LA(1) <= 117):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_asm2_statement2094
+ )
+ raise mse
+
+
+
+
+ else:
+ break #loop86
+
+
+ self.match(self.input, 63, self.FOLLOW_63_in_asm2_statement2101)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2103)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 61, asm2_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end asm2_statement
+
+
+ # $ANTLR start asm1_statement
+ # C.g:535:1: asm1_statement : '_asm' '{' (~ ( '}' ) )* '}' ;
+ def asm1_statement(self):
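+ # Matches MSVC-style inline assembly: '_asm' (token 104) followed by a
+ # braced block, '{' (token 43) .. '}' (token 44), whose body is any
+ # token except '}'. Hypothetical input: _asm { mov eax, 1 }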
+
+ asm1_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
+ return
+
+ # C.g:536:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
+ # C.g:536:4: '_asm' '{' (~ ( '}' ) )* '}'
+ self.match(self.input, 104, self.FOLLOW_104_in_asm1_statement2115)
+ if self.failed:
+ return
+ self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2117)
+ if self.failed:
+ return
+ # C.g:536:15: (~ ( '}' ) )*
+ while True: #loop87
+ alt87 = 2
+ LA87_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA87_0 <= 43) or (45 <= LA87_0 <= 117)) :
+ alt87 = 1
+
+
+ if alt87 == 1:
+ # C.g:536:16: ~ ( '}' )
+ if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_asm1_statement2120
+ )
+ raise mse
+
+
+
+
+ else:
+ break #loop87
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2127)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 62, asm1_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end asm1_statement
+
+
+ # $ANTLR start asm_statement
+ # C.g:539:1: asm_statement : '__asm' '{' (~ ( '}' ) )* '}' ;
+ def asm_statement(self):
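+ # Same shape as asm1_statement but introduced by '__asm' (token 105):
+ # '__asm' '{' (anything but '}')* '}'. Hypothetical input:
+ #     __asm { wbinvd }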
+
+ asm_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
+ return
+
+ # C.g:540:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
+ # C.g:540:4: '__asm' '{' (~ ( '}' ) )* '}'
+ self.match(self.input, 105, self.FOLLOW_105_in_asm_statement2138)
+ if self.failed:
+ return
+ self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2140)
+ if self.failed:
+ return
+ # C.g:540:16: (~ ( '}' ) )*
+ while True: #loop88
+ alt88 = 2
+ LA88_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA88_0 <= 43) or (45 <= LA88_0 <= 117)) :
+ alt88 = 1
+
+
+ if alt88 == 1:
+ # C.g:540:17: ~ ( '}' )
+ if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
+ self.input.consume()
+ self.errorRecovery = False
+ self.failed = False
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ mse = MismatchedSetException(None, self.input)
+ self.recoverFromMismatchedSet(
+ self.input, mse, self.FOLLOW_set_in_asm_statement2143
+ )
+ raise mse
+
+
+
+
+ else:
+ break #loop88
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2150)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 63, asm_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end asm_statement
+
+
+ # $ANTLR start macro_statement
+ # C.g:543:1: macro_statement : IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' ;
+ def macro_statement(self):
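+ # Matches statement-like macro invocations: IDENTIFIER '(' followed by
+ # zero or more declarations, an optional statement_list, an optional
+ # expression, and ')'. The loops below lean heavily on the synpred181
+ # and synpred182 predicates, because plain lookahead cannot separate
+ # these three parts of the macro body.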
+
+ macro_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
+ return
+
+ # C.g:544:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' )
+ # C.g:544:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')'
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2162)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2164)
+ if self.failed:
+ return
+ # C.g:544:19: ( declaration )*
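+ # loop89: greedily consume leading declarations inside the macro body.
+ # Every branch keys on LA(1)/LA(2)/LA(3) and then confirms with
+ # synpred181 before committing to another declaration iteration.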
+ while True: #loop89
+ alt89 = 2
+ LA89 = self.input.LA(1)
+ if LA89 == IDENTIFIER:
+ LA89 = self.input.LA(2)
+ if LA89 == 62:
+ LA89_45 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_47 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 66:
+ LA89_50 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_68 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_71 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_72 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_73 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_74 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_75 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_76 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_77 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_78 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_79 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_80 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_81 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_82 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_83 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_84 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_85 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_86 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 26:
+ LA89 = self.input.LA(2)
+ if LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_87 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_88 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_89 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_90 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_91 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_92 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_93 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_94 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_95 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_96 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_97 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_98 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_99 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_100 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 66:
+ LA89_101 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_102 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_103 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_104 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_105 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_106 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_107 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_108 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_109 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_110 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_111 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_112 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_113 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_114 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_115 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_116 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_117 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_118 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_119 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_120 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_121 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_122 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_123 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_124 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_125 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 34:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_126 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_127 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_128 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_129 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_130 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_131 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_132 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_133 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_134 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_135 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_136 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_137 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_138 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_139 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_140 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_141 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_142 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_143 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_144 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_145 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 35:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_146 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_147 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_148 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_149 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_150 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_151 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_152 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_153 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_154 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_155 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_156 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_157 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_158 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_159 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_160 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_161 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_162 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_163 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_164 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_165 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 36:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_166 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_167 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_168 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_169 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_170 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_171 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_172 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_173 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_174 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_175 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_176 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_177 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_178 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_179 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_180 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_181 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_182 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_183 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_184 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_185 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 37:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_186 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_187 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_188 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_189 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_190 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_191 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_192 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_193 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_194 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_195 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_196 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_197 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_198 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_199 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_200 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_201 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_202 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_203 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_204 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_205 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 38:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_206 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_207 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_208 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_209 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_210 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_211 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_212 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_213 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_214 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_215 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_216 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_217 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_218 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_219 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_220 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_221 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_222 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_223 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_224 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_225 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 39:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_226 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_227 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_228 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_229 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_230 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_231 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_232 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_233 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_234 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_235 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_236 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_237 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_238 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_239 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_240 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_241 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_242 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_243 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_244 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_245 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 40:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_246 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_247 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_248 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_249 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_250 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_251 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_252 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_253 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_254 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_255 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_256 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_257 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_258 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_259 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_260 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_261 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_262 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_263 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_264 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_265 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 41:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_266 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_267 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_268 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_269 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_270 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_271 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_272 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_273 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_274 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_275 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_276 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_277 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_278 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_279 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_280 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_281 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_282 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_283 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_284 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_285 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 42:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_286 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_287 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_288 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_289 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_290 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_291 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_292 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_293 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_294 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_295 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_296 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_297 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_298 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_299 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_300 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_301 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_302 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_303 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_304 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_305 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_40 = self.input.LA(2)
+
+ if (LA89_40 == IDENTIFIER) :
+ LA89_306 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif (LA89_40 == 43) :
+ LA89_307 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+
+ elif LA89 == 48:
+ LA89_41 = self.input.LA(2)
+
+ if (LA89_41 == 43) :
+ LA89_308 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif (LA89_41 == IDENTIFIER) :
+ LA89_309 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+
+ elif 49 <= LA89 <= 61:
+ LA89 = self.input.LA(2)
+ if LA89 == 66:
+ LA89_310 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 58:
+ LA89_311 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 59:
+ LA89_312 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 60:
+ LA89_313 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == IDENTIFIER:
+ LA89_314 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 62:
+ LA89_315 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 25:
+ LA89_316 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
+ LA89_317 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 34:
+ LA89_318 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 35:
+ LA89_319 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 36:
+ LA89_320 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 37:
+ LA89_321 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 38:
+ LA89_322 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 39:
+ LA89_323 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 40:
+ LA89_324 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 41:
+ LA89_325 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 42:
+ LA89_326 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 45 or LA89 == 46:
+ LA89_327 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif LA89 == 48:
+ LA89_328 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+ elif 49 <= LA89 <= 57 or LA89 == 61:
+ LA89_329 = self.input.LA(3)
+
+ if (self.synpred181()) :
+ alt89 = 1
+
+
+
+
+ if alt89 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_macro_statement2166)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop89
+
+
+ # C.g:544:33: ( statement_list )?
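+ # alt90 decides whether an optional statement_list follows the
+ # declarations; as with loop89, each lookahead branch is confirmed by
+ # synpred182 before alternative 1 (a statement_list is present) is taken.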
+ alt90 = 2
+ LA90 = self.input.LA(1)
+ if LA90 == IDENTIFIER:
+ LA90 = self.input.LA(2)
+ if LA90 == 25 or 29 <= LA90 <= 42 or 45 <= LA90 <= 61:
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_45 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_46 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == IDENTIFIER:
+ LA90_47 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 64:
+ LA90_48 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_49 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_50 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_51 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_52 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_53 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_54 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_55 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_56 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_57 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_58 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_59 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_60 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_61 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_62 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_63 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_64 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_65 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_66 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_67 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_70 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif 25 <= LA90 <= 26 or 29 <= LA90 <= 43 or 45 <= LA90 <= 46 or 48 <= LA90 <= 61 or 103 <= LA90 <= 108 or 110 <= LA90 <= 117:
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_87 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_88 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_89 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_90 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_91 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_92 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_93 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_94 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_95 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_96 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_97 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_98 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_99 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_100 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_101 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_102 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_103 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_104 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_105 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_106 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_107 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_108 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_111 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_112 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_113 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_114 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_115 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_116 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_117 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_118 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_119 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_120 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_121 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_122 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_123 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_124 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_125 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_126 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_127 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_128 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_129 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_130 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_131 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_134 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_135 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_136 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_137 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_138 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_139 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_140 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_141 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_142 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_143 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_144 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_145 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_146 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_147 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_148 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_149 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_150 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_151 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_152 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_153 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_154 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_155 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_156 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_159 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_160 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_161 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_162 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_163 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_164 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_165 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_166 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_167 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_168 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_169 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_170 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_171 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_172 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_173 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_174 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_175 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_176 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_177 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_178 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_179 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_181 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_183 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 64:
+ LA90_184 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_185 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_186 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_187 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_188 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_189 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_190 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_191 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_192 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_193 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_194 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_195 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_196 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_197 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_198 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_199 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_200 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_201 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_202 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_203 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_204 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_205 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_206 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90 = self.input.LA(2)
+ if LA90 == 64:
+ LA90_209 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_210 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 75:
+ LA90_211 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66:
+ LA90_212 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 76:
+ LA90_213 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_214 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_215 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 28 or 80 <= LA90 <= 89:
+ LA90_216 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 70:
+ LA90_217 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 71:
+ LA90_218 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 68:
+ LA90_219 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 69:
+ LA90_220 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 101 or LA90 == 102:
+ LA90_221 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
+ LA90_222 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 95 or LA90 == 96:
+ LA90_223 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 77:
+ LA90_224 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 94:
+ LA90_225 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 93:
+ LA90_226 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 92:
+ LA90_227 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 91:
+ LA90_228 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 90:
+ LA90_229 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 27:
+ LA90_230 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 25:
+ alt90 = 1
+ elif LA90 == 62:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_233 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_234 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_235 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_236 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_237 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_238 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_239 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_240 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_241 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_242 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_243 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_244 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61:
+ LA90_245 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 34:
+ LA90_246 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 35:
+ LA90_247 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 36:
+ LA90_248 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 37:
+ LA90_249 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 38:
+ LA90_250 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 39:
+ LA90_251 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 40:
+ LA90_252 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 41:
+ LA90_253 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 42:
+ LA90_254 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 45 or LA90 == 46:
+ LA90_255 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 48:
+ LA90_256 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_257 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_258 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_259 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_260 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_261 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_262 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_263 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_264 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_265 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_266 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_267 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_268 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90 = self.input.LA(2)
+ if LA90 == IDENTIFIER:
+ LA90_269 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_270 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_271 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_272 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_273 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_274 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_275 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 62:
+ LA90_276 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_277 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_278 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_279 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_280 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90 = self.input.LA(2)
+ if LA90 == 62:
+ LA90_281 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == IDENTIFIER:
+ LA90_282 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_283 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_284 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_285 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_286 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_287 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_288 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_289 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_290 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_291 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_292 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90 = self.input.LA(2)
+ if LA90 == 62:
+ LA90_293 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == IDENTIFIER:
+ LA90_294 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == HEX_LITERAL:
+ LA90_295 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == OCTAL_LITERAL:
+ LA90_296 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == DECIMAL_LITERAL:
+ LA90_297 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == CHARACTER_LITERAL:
+ LA90_298 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == STRING_LITERAL:
+ LA90_299 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == FLOATING_POINT_LITERAL:
+ LA90_300 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 72:
+ LA90_301 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 73:
+ LA90_302 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
+ LA90_303 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
+ elif LA90 == 74:
+ LA90_304 = self.input.LA(3)
+
+ if (self.synpred182()) :
+ alt90 = 1
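+        # alt90 == 1 means the predicate analysis above recognized an
+        # optional statement_list at this point.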
+ if alt90 == 1:
+ # C.g:0:0: statement_list
+ self.following.append(self.FOLLOW_statement_list_in_macro_statement2170)
+ self.statement_list()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:544:49: ( expression )?
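+            # One token of lookahead (alt91) predicts the optional
+            # expression; token 63 then closes the macro statement.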
+ alt91 = 2
+ LA91_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA91_0 <= FLOATING_POINT_LITERAL) or LA91_0 == 62 or LA91_0 == 66 or (68 <= LA91_0 <= 69) or (72 <= LA91_0 <= 74) or (77 <= LA91_0 <= 79)) :
+ alt91 = 1
+ if alt91 == 1:
+ # C.g:0:0: expression
+ self.following.append(self.FOLLOW_expression_in_macro_statement2173)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 63, self.FOLLOW_63_in_macro_statement2176)
+ if self.failed:
+ return
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 64, macro_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end macro_statement
+
+
+ # $ANTLR start labeled_statement
+ # C.g:547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );
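+    # Generated recognizer for the rule above: alt92 selects a plain
+    # label (IDENTIFIER ':'), a 'case' label (token 106), or 'default'
+    # (token 107) on one token of lookahead; any other token raises
+    # NoViableAltException.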
+ def labeled_statement(self, ):
+
+ labeled_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65):
+ return
+
+ # C.g:548:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement )
+ alt92 = 3
+ LA92 = self.input.LA(1)
+ if LA92 == IDENTIFIER:
+ alt92 = 1
+ elif LA92 == 106:
+ alt92 = 2
+ elif LA92 == 107:
+ alt92 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input)
+
+ raise nvae
+
+ if alt92 == 1:
+ # C.g:548:4: IDENTIFIER ':' statement
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_labeled_statement2188)
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2190)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_labeled_statement2192)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt92 == 2:
+ # C.g:549:4: 'case' constant_expression ':' statement
+ self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2197)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2199)
+ self.constant_expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2201)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_labeled_statement2203)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt92 == 3:
+ # C.g:550:4: 'default' ':' statement
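+    # Generated recognizer for the rule above: loop95 matches statements
+    # greedily, consulting the synpred188 predicate on ambiguous lookahead;
+    # cnt95 tracks the iteration count for the '+' (one-or-more) bound.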
+ self.match(self.input, 107, self.FOLLOW_107_in_labeled_statement2208)
+ if self.failed:
+ return
+ self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2210)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_labeled_statement2212)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 65, labeled_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end labeled_statement
+
+ class compound_statement_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start compound_statement
+ # C.g:553:1: compound_statement : '{' ( declaration )* ( statement_list )? '}' ;
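+    # Generated recognizer for the rule above: token 43 is '{' and token
+    # 44 is '}'.  loop93 consumes any leading declarations, telling them
+    # apart from statements with up to three tokens of lookahead plus the
+    # synpred186 syntactic predicate, before an optional statement_list.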
+ def compound_statement(self, ):
+
+ retval = self.compound_statement_return()
+ retval.start = self.input.LT(1)
+ compound_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 66):
+ return retval
+
+ # C.g:554:2: ( '{' ( declaration )* ( statement_list )? '}' )
+ # C.g:554:4: '{' ( declaration )* ( statement_list )? '}'
+ self.match(self.input, 43, self.FOLLOW_43_in_compound_statement2223)
+ if self.failed:
+ return retval
+ # C.g:554:8: ( declaration )*
+ while True: #loop93
+ alt93 = 2
+ LA93 = self.input.LA(1)
+ if LA93 == IDENTIFIER:
+ LA93 = self.input.LA(2)
+ if LA93 == 62:
+ LA93_44 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_47 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 66:
+ LA93_48 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_49 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_50 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_51 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_52 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_53 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_54 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_55 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_56 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_57 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_58 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_59 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_60 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_61 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_62 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_63 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_64 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_65 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 26:
+ LA93 = self.input.LA(2)
+ if LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_86 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_87 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_88 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_89 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_90 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_91 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_92 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_93 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_94 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_95 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_96 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_97 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_98 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_99 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 66:
+ LA93_100 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_101 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_102 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_103 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_104 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_105 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_106 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_107 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_108 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_109 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_110 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_111 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_112 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_113 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_114 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_115 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_116 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_117 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_118 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_119 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_120 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_121 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_122 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_123 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_124 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 34:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_125 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_126 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_127 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_128 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_129 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_130 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_131 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_132 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_133 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_134 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_135 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_136 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_137 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_138 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_139 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_140 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_141 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_142 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_143 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_144 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 35:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_145 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_146 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_147 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_148 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_149 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_150 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_151 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_152 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_153 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_154 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_155 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_156 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_157 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_158 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_159 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_160 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_161 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_162 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_163 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_164 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 36:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_165 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_166 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_167 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_168 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_169 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_170 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_171 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_172 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_173 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_174 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_175 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_176 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_177 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_178 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_179 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_180 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_181 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_182 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_183 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_184 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 37:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_185 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_186 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_187 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_188 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_189 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_190 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_191 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_192 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_193 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_194 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_195 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_196 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_197 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_198 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_199 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_200 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_201 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_202 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_203 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_204 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 38:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_205 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_206 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_207 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_208 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_209 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_210 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_211 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_212 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_213 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_214 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_215 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_216 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_217 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_218 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_219 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_220 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_221 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_222 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_223 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_224 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 39:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_225 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_226 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_227 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_228 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_229 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_230 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_231 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_232 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_233 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_234 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_235 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_236 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_237 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_238 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_239 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_240 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_241 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_242 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_243 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_244 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 40:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_245 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_246 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_247 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_248 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_249 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_250 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_251 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_252 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_253 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_254 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_255 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_256 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_257 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_258 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_259 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_260 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_261 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_262 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_263 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_264 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 41:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_265 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_266 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_267 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_268 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_269 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_270 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_271 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_272 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_273 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_274 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_275 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_276 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_277 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_278 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_279 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_280 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_281 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_282 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_283 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_284 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 42:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_285 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_286 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_287 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_288 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_289 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_290 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_291 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_292 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_293 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_294 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_295 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_296 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_297 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_298 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_299 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_300 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_301 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_302 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_303 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_304 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_40 = self.input.LA(2)
+
+ if (LA93_40 == IDENTIFIER) :
+ LA93_305 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif (LA93_40 == 43) :
+ LA93_306 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+
+ elif LA93 == 48:
+ LA93_41 = self.input.LA(2)
+
+ if (LA93_41 == 43) :
+ LA93_307 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif (LA93_41 == IDENTIFIER) :
+ LA93_308 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 58 or LA93 == 59 or LA93 == 60 or LA93 == 61:
+ LA93 = self.input.LA(2)
+ if LA93 == 66:
+ LA93_309 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 58:
+ LA93_310 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 59:
+ LA93_311 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 60:
+ LA93_312 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == IDENTIFIER:
+ LA93_313 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 62:
+ LA93_314 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 25:
+ LA93_315 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
+ LA93_316 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 34:
+ LA93_317 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 35:
+ LA93_318 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 36:
+ LA93_319 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 37:
+ LA93_320 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 38:
+ LA93_321 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 39:
+ LA93_322 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 40:
+ LA93_323 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 41:
+ LA93_324 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 42:
+ LA93_325 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 45 or LA93 == 46:
+ LA93_326 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 48:
+ LA93_327 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+ elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
+ LA93_328 = self.input.LA(3)
+
+ if (self.synpred186()) :
+ alt93 = 1
+
+
+
+
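+                # alt93 == 1 means the lookahead predicted a declaration;
+                # any other value ends loop93.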
+ if alt93 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_compound_statement2225)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+ else:
+ break #loop93
+
+
+ # C.g:554:21: ( statement_list )?
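+            # One token of lookahead (alt94) decides whether a
+            # statement_list precedes the closing '}' (token 44).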
+ alt94 = 2
+ LA94_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA94_0 <= FLOATING_POINT_LITERAL) or (25 <= LA94_0 <= 26) or (29 <= LA94_0 <= 43) or (45 <= LA94_0 <= 46) or (48 <= LA94_0 <= 62) or LA94_0 == 66 or (68 <= LA94_0 <= 69) or (72 <= LA94_0 <= 74) or (77 <= LA94_0 <= 79) or (103 <= LA94_0 <= 108) or (110 <= LA94_0 <= 117)) :
+ alt94 = 1
+ if alt94 == 1:
+ # C.g:0:0: statement_list
+ self.following.append(self.FOLLOW_statement_list_in_compound_statement2228)
+ self.statement_list()
+ self.following.pop()
+ if self.failed:
+ return retval
+
+
+
+ self.match(self.input, 44, self.FOLLOW_44_in_compound_statement2231)
+ if self.failed:
+ return retval
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 66, compound_statement_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end compound_statement
+
+
+ # $ANTLR start statement_list
+ # C.g:557:1: statement_list : ( statement )+ ;
+ def statement_list(self, ):
+
+ statement_list_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67):
+ return
+
+ # C.g:558:2: ( ( statement )+ )
+ # C.g:558:4: ( statement )+
+ # C.g:558:4: ( statement )+
+ cnt95 = 0
+ while True: #loop95
+ alt95 = 2
+ LA95 = self.input.LA(1)
+ if LA95 == IDENTIFIER:
+ LA95 = self.input.LA(2)
+ if LA95 == 62:
+ LA95_46 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 45 or LA95 == 46 or LA95 == 47 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
+ alt95 = 1
+ elif LA95 == STRING_LITERAL:
+ LA95_48 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == IDENTIFIER:
+ LA95_49 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 64:
+ LA95_50 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_51 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_52 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_53 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_54 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_55 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_56 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_57 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_58 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_59 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_60 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_61 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_62 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_63 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_64 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_65 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_66 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_67 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_68 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_69 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_88 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_89 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_90 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_91 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_92 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_93 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_94 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_95 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_96 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_97 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_98 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_99 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_100 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_101 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_102 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_103 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_104 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_105 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_106 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_107 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_108 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_109 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_110 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_113 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_114 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_115 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_116 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_117 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_118 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_119 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_120 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_121 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_122 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_123 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_124 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_125 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_126 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_127 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_128 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_129 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_130 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_131 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_132 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_133 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_135 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_137 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_138 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_139 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_140 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_141 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_142 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_143 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_144 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_145 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_146 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_147 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_148 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_149 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_150 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_151 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_152 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_153 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_154 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_155 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_156 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_157 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_158 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_161 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_162 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_163 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_164 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_165 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_166 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_167 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_168 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_169 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_170 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_171 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_172 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_173 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_174 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_175 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_176 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_177 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_178 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_179 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_180 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_181 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_182 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+
+ elif LA95 == STRING_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_185 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 64:
+ LA95_186 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_187 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_188 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_189 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_190 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_191 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_192 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_193 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_194 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_195 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_196 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_197 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_198 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_199 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_200 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_201 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_202 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_203 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_204 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_205 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_206 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+ elif LA95 == STRING_LITERAL:
+ LA95_208 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_209 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95 = self.input.LA(2)
+ if LA95 == 64:
+ LA95_211 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_212 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 75:
+ LA95_213 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66:
+ LA95_214 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 76:
+ LA95_215 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_216 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_217 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 70:
+ LA95_218 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 71:
+ LA95_219 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 68:
+ LA95_220 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 69:
+ LA95_221 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 101 or LA95 == 102:
+ LA95_222 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
+ LA95_223 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 95 or LA95 == 96:
+ LA95_224 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 77:
+ LA95_225 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 94:
+ LA95_226 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 93:
+ LA95_227 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 92:
+ LA95_228 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 91:
+ LA95_229 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 90:
+ LA95_230 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 27:
+ LA95_231 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 25:
+ alt95 = 1
+ elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
+ LA95_234 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 62:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_235 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_236 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_237 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_238 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_239 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_240 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_241 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_242 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_243 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_244 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_245 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_246 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
+ LA95_247 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 34:
+ LA95_248 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 35:
+ LA95_249 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 36:
+ LA95_250 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 37:
+ LA95_251 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 38:
+ LA95_252 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 39:
+ LA95_253 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 40:
+ LA95_254 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 41:
+ LA95_255 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 42:
+ LA95_256 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 45 or LA95 == 46:
+ LA95_257 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 48:
+ LA95_258 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 72:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_259 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_260 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_261 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_262 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_263 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_264 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_265 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_266 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_267 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_268 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_269 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_270 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 73:
+ LA95 = self.input.LA(2)
+ if LA95 == IDENTIFIER:
+ LA95_271 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_272 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_273 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_274 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_275 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_276 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_277 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 62:
+ LA95_278 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_279 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_280 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_281 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_282 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95 = self.input.LA(2)
+ if LA95 == 62:
+ LA95_283 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == IDENTIFIER:
+ LA95_284 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_285 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_286 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_287 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_288 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_289 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_290 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_291 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_292 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_293 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_294 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 74:
+ LA95 = self.input.LA(2)
+ if LA95 == 62:
+ LA95_295 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == IDENTIFIER:
+ LA95_296 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == HEX_LITERAL:
+ LA95_297 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == OCTAL_LITERAL:
+ LA95_298 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == DECIMAL_LITERAL:
+ LA95_299 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == CHARACTER_LITERAL:
+ LA95_300 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == STRING_LITERAL:
+ LA95_301 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == FLOATING_POINT_LITERAL:
+ LA95_302 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 72:
+ LA95_303 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 73:
+ LA95_304 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
+ LA95_305 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+ elif LA95 == 74:
+ LA95_306 = self.input.LA(3)
+
+ if (self.synpred188()) :
+ alt95 = 1
+
+
+
+ elif LA95 == 25 or LA95 == 26 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 43 or LA95 == 45 or LA95 == 46 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61 or LA95 == 103 or LA95 == 104 or LA95 == 105 or LA95 == 106 or LA95 == 107 or LA95 == 108 or LA95 == 110 or LA95 == 111 or LA95 == 112 or LA95 == 113 or LA95 == 114 or LA95 == 115 or LA95 == 116 or LA95 == 117:
+ alt95 = 1
+
+ if alt95 == 1:
+ # C.g:0:0: statement
+ self.following.append(self.FOLLOW_statement_in_statement_list2242)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt95 >= 1:
+ break #loop95
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(95, self.input)
+ raise eee
+
+ cnt95 += 1
+
+
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 67, statement_list_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end statement_list
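+
+ # NOTE: loop95 above implements the ( statement )+ subrule of
+ # statement_list. Each pass inspects up to three tokens of lookahead
+ # (self.input.LA(1..3)) and, where the token alone is ambiguous, confirms
+ # the decision with the syntactic predicate synpred188 (a speculative
+ # parse of one complete 'statement') before iterating again. If the very
+ # first pass matches nothing, EarlyExitException(95) reports the failed
+ # (...)+ loop.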
+
+ class expression_statement_return(object):
+ def __init__(self):
+ self.start = None
+ self.stop = None
+
+
+
+ # $ANTLR start expression_statement
+ # C.g:561:1: expression_statement : ( ';' | expression ';' );
+ def expression_statement(self, ):
+
+ retval = self.expression_statement_return()
+ retval.start = self.input.LT(1)
+ expression_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 68):
+ return retval
+
+ # C.g:562:2: ( ';' | expression ';' )
+ alt96 = 2
+ LA96_0 = self.input.LA(1)
+
+ if (LA96_0 == 25) :
+ alt96 = 1
+ elif ((IDENTIFIER <= LA96_0 <= FLOATING_POINT_LITERAL) or LA96_0 == 62 or LA96_0 == 66 or (68 <= LA96_0 <= 69) or (72 <= LA96_0 <= 74) or (77 <= LA96_0 <= 79)) :
+ alt96 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return retval
+
+ nvae = NoViableAltException("561:1: expression_statement : ( ';' | expression ';' );", 96, 0, self.input)
+
+ raise nvae
+
+ if alt96 == 1:
+ # C.g:562:4: ';'
+ self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2254)
+ if self.failed:
+ return retval
+
+
+ elif alt96 == 2:
+ # C.g:563:4: expression ';'
+ self.following.append(self.FOLLOW_expression_in_expression_statement2259)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return retval
+ self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2261)
+ if self.failed:
+ return retval
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 68, expression_statement_StartIndex)
+
+ pass
+
+ return retval
+
+ # $ANTLR end expression_statement
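+
+ # NOTE: expression_statement is one of the few rules here that returns a
+ # value object. retval.start is captured from self.input.LT(1) on entry
+ # and retval.stop from self.input.LT(-1) on exit, so callers such as
+ # iteration_statement can recover the matched source span with
+ # self.input.toString(e.start, e.stop).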
+
+
+ # $ANTLR start selection_statement
+ # C.g:566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );
+ def selection_statement(self, ):
+
+ selection_statement_StartIndex = self.input.index()
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
+ return
+
+ # C.g:567:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
+ alt98 = 2
+ LA98_0 = self.input.LA(1)
+
+ if (LA98_0 == 108) :
+ alt98 = 1
+ elif (LA98_0 == 110) :
+ alt98 = 2
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)
+
+ raise nvae
+
+ if alt98 == 1:
+ # C.g:567:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
+ self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2272)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2274)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_selection_statement2278)
+ e = self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2280)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+ self.following.append(self.FOLLOW_statement_in_selection_statement2284)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:567:167: ( options {k=1; backtrack=false; } : 'else' statement )?
+ alt97 = 2
+ LA97_0 = self.input.LA(1)
+
+ if (LA97_0 == 109) :
+ alt97 = 1
+ if alt97 == 1:
+ # C.g:567:200: 'else' statement
+ self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2299)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_selection_statement2301)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ elif alt98 == 2:
+ # C.g:568:4: 'switch' '(' expression ')' statement
+ self.match(self.input, 110, self.FOLLOW_110_in_selection_statement2308)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2310)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_selection_statement2312)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2314)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_selection_statement2316)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 69, selection_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end selection_statement
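+
+ # NOTE: The optional 'else' subrule above is generated with
+ # options {k=1; backtrack=false;}, so an 'else' (token 109) in the
+ # lookahead is always consumed greedily and binds to the innermost 'if',
+ # which resolves the classic dangling-else ambiguity the way C requires.
+ # When not backtracking, StorePredicateExpression records the source span
+ # of the 'if' condition for later analysis.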
+
+
+ # $ANTLR start iteration_statement
+ # C.g:571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );
+ def iteration_statement(self, ):
+
+ iteration_statement_StartIndex = self.input.index()
+ e = None
+
+
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
+ return
+
+ # C.g:572:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
+ alt100 = 3
+ LA100 = self.input.LA(1)
+ if LA100 == 111:
+ alt100 = 1
+ elif LA100 == 112:
+ alt100 = 2
+ elif LA100 == 113:
+ alt100 = 3
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)
+
+ raise nvae
+
+ if alt100 == 1:
+ # C.g:572:4: 'while' '(' e= expression ')' statement
+ self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2327)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2329)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_iteration_statement2333)
+ e = self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2335)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_iteration_statement2337)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+ elif alt100 == 2:
+ # C.g:573:4: 'do' statement 'while' '(' e= expression ')' ';'
+ self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2344)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_iteration_statement2346)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2348)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2350)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_iteration_statement2354)
+ e = self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2356)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2358)
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+ elif alt100 == 3:
+ # C.g:574:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
+ self.match(self.input, 113, self.FOLLOW_113_in_iteration_statement2365)
+ if self.failed:
+ return
+ self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2367)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2369)
+ self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2373)
+ e = self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:574:58: ( expression )?
+ alt99 = 2
+ LA99_0 = self.input.LA(1)
+
+ if ((IDENTIFIER <= LA99_0 <= FLOATING_POINT_LITERAL) or LA99_0 == 62 or LA99_0 == 66 or (68 <= LA99_0 <= 69) or (72 <= LA99_0 <= 74) or (77 <= LA99_0 <= 79)) :
+ alt99 = 1
+ if alt99 == 1:
+ # C.g:0:0: expression
+ self.following.append(self.FOLLOW_expression_in_iteration_statement2375)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2378)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_statement_in_iteration_statement2380)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+ if self.backtracking == 0:
+ self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
+
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 70, iteration_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end iteration_statement
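+
+ # NOTE: All three loop forms funnel their controlling expression through
+ # StorePredicateExpression once backtracking is off. For 'for' loops the
+ # recorded condition is the second expression_statement (e); the optional
+ # trailing expression (alt99) is parsed as the third, increment-style
+ # clause but is not recorded.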
+
+
+ # $ANTLR start jump_statement
+ # C.g:577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );
+ def jump_statement(self, ):
+
+ jump_statement_StartIndex = self.input.index()
+ try:
+ try:
+ if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
+ return
+
+ # C.g:578:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
+ alt101 = 5
+ LA101 = self.input.LA(1)
+ if LA101 == 114:
+ alt101 = 1
+ elif LA101 == 115:
+ alt101 = 2
+ elif LA101 == 116:
+ alt101 = 3
+ elif LA101 == 117:
+ LA101_4 = self.input.LA(2)
+
+ if (LA101_4 == 25) :
+ alt101 = 4
+ elif ((IDENTIFIER <= LA101_4 <= FLOATING_POINT_LITERAL) or LA101_4 == 62 or LA101_4 == 66 or (68 <= LA101_4 <= 69) or (72 <= LA101_4 <= 74) or (77 <= LA101_4 <= 79)) :
+ alt101 = 5
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input)
+
+ raise nvae
+
+ else:
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input)
+
+ raise nvae
+
+ if alt101 == 1:
+ # C.g:578:4: 'goto' IDENTIFIER ';'
+ self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2393)
+ if self.failed:
+ return
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2395)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2397)
+ if self.failed:
+ return
+
+
+ elif alt101 == 2:
+ # C.g:579:4: 'continue' ';'
+ self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2402)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2404)
+ if self.failed:
+ return
+
+
+ elif alt101 == 3:
+ # C.g:580:4: 'break' ';'
+ self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2409)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2411)
+ if self.failed:
+ return
+
+
+ elif alt101 == 4:
+ # C.g:581:4: 'return' ';'
+ self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2416)
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2418)
+ if self.failed:
+ return
+
+
+ elif alt101 == 5:
+ # C.g:582:4: 'return' expression ';'
+ self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2423)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_expression_in_jump_statement2425)
+ self.expression()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2427)
+ if self.failed:
+ return
+
+
+
+ except RecognitionException as re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+ if self.backtracking > 0:
+ self.memoize(self.input, 71, jump_statement_StartIndex)
+
+ pass
+
+ return
+
+ # $ANTLR end jump_statement
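+
+ # The synpredNNN_fragment methods below are generated from the syntactic
+ # predicates embedded in C.g. A fragment simply re-parses its alternative;
+ # it produces no value and communicates failure only through self.failed.
+ # The corresponding synpredNNN() drivers (defined after the fragments)
+ # wrap each fragment in mark/rewind so the speculative parse never moves
+ # the token stream.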
+
+ # $ANTLR start synpred2
+ def synpred2_fragment(self, ):
+ # C.g:119:6: ( declaration_specifiers )
+ # C.g:119:6: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred2100)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred2
+
+
+
+ # $ANTLR start synpred4
+ def synpred4_fragment(self, ):
+ # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )
+ # C.g:119:6: ( declaration_specifiers )? declarator ( declaration )* '{'
+ # C.g:119:6: ( declaration_specifiers )?
+ alt102 = 2
+ LA102 = self.input.LA(1)
+ if LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33 or LA102 == 34 or LA102 == 35 or LA102 == 36 or LA102 == 37 or LA102 == 38 or LA102 == 39 or LA102 == 40 or LA102 == 41 or LA102 == 42 or LA102 == 45 or LA102 == 46 or LA102 == 48 or LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
+ alt102 = 1
+ elif LA102 == IDENTIFIER:
+ LA102 = self.input.LA(2)
+ if LA102 == 62:
+ LA102_21 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33:
+ LA102_23 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 34:
+ LA102_24 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 35:
+ LA102_25 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 36:
+ LA102_26 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 37:
+ LA102_27 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 38:
+ LA102_28 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 39:
+ LA102_29 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 40:
+ LA102_30 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 41:
+ LA102_31 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 42:
+ LA102_32 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 45 or LA102 == 46:
+ LA102_33 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 48:
+ LA102_34 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == IDENTIFIER:
+ LA102_35 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 58:
+ LA102_36 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 66:
+ alt102 = 1
+ elif LA102 == 59:
+ LA102_39 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 60:
+ LA102_40 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
+ LA102_41 = self.input.LA(3)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 58:
+ LA102_14 = self.input.LA(2)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 59:
+ LA102_16 = self.input.LA(2)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ elif LA102 == 60:
+ LA102_17 = self.input.LA(2)
+
+ if (self.synpred2()) :
+ alt102 = 1
+ if alt102 == 1:
+ # C.g:0:0: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred4100)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_declarator_in_synpred4103)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:119:41: ( declaration )*
+ while True: #loop103
+ alt103 = 2
+ LA103_0 = self.input.LA(1)
+
+ if (LA103_0 == IDENTIFIER or LA103_0 == 26 or (29 <= LA103_0 <= 42) or (45 <= LA103_0 <= 46) or (48 <= LA103_0 <= 61)) :
+ alt103 = 1
+
+
+ if alt103 == 1:
+ # C.g:0:0: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred4105)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop103
+
+
+ self.match(self.input, 43, self.FOLLOW_43_in_synpred4108)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred4
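+
+ # NOTE: synpred4 recognizes the shape of a function definition header:
+ # optional declaration_specifiers, a declarator, zero or more declarations
+ # (the old K&R parameter style), then '{' (token 43). This lets the
+ # parser distinguish a definition from a plain declaration before
+ # committing to either alternative.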
+
+
+
+ # $ANTLR start synpred5
+ def synpred5_fragment(self, ):
+ # C.g:120:4: ( declaration )
+ # C.g:120:4: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred5118)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred5
+
+
+
+ # $ANTLR start synpred7
+ def synpred7_fragment(self, ):
+ # C.g:146:6: ( declaration_specifiers )
+ # C.g:146:6: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred7157)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred7
+
+
+
+ # $ANTLR start synpred10
+ def synpred10_fragment(self, ):
+ # C.g:167:18: ( declaration_specifiers )
+ # C.g:167:18: declaration_specifiers
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred10207)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred10
+
+
+
+ # $ANTLR start synpred14
+ def synpred14_fragment(self, ):
+ # C.g:184:7: ( type_specifier )
+ # C.g:184:7: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_synpred14272)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred14
+
+
+
+ # $ANTLR start synpred15
+ def synpred15_fragment(self, ):
+ # C.g:185:13: ( type_qualifier )
+ # C.g:185:13: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred15286)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred15
+
+
+
+ # $ANTLR start synpred33
+ def synpred33_fragment(self, ):
+ # C.g:225:16: ( type_qualifier )
+ # C.g:225:16: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred33444)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred33
+
+
+
+ # $ANTLR start synpred34
+ def synpred34_fragment(self, ):
+ # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )
+ # C.g:225:5: IDENTIFIER ( type_qualifier )* declarator
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34442)
+ if self.failed:
+ return
+ # C.g:225:16: ( type_qualifier )*
+ while True: #loop106
+ alt106 = 2
+ LA106 = self.input.LA(1)
+ if LA106 == 58:
+ LA106_2 = self.input.LA(2)
+
+ if (self.synpred33()) :
+ alt106 = 1
+
+
+ elif LA106 == 59:
+ LA106_3 = self.input.LA(2)
+
+ if (self.synpred33()) :
+ alt106 = 1
+
+
+ elif LA106 == 60:
+ LA106_4 = self.input.LA(2)
+
+ if (self.synpred33()) :
+ alt106 = 1
+
+
+ elif LA106 == 49 or LA106 == 50 or LA106 == 51 or LA106 == 52 or LA106 == 53 or LA106 == 54 or LA106 == 55 or LA106 == 56 or LA106 == 57 or LA106 == 61:
+ alt106 = 1
+
+ if alt106 == 1:
+ # C.g:0:0: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred34444)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop106
+
+
+ self.following.append(self.FOLLOW_declarator_in_synpred34447)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred34
+
+
+
+ # $ANTLR start synpred39
+ def synpred39_fragment(self, ):
+ # C.g:253:6: ( type_qualifier )
+ # C.g:253:6: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred39566)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred39
+
+
+
+ # $ANTLR start synpred40
+ def synpred40_fragment(self, ):
+ # C.g:253:23: ( type_specifier )
+ # C.g:253:23: type_specifier
+ self.following.append(self.FOLLOW_type_specifier_in_synpred40570)
+ self.type_specifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred40
+
+
+
+ # $ANTLR start synpred66
+ def synpred66_fragment(self, ):
+ # C.g:297:4: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator )
+ # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
+ # C.g:297:4: ( pointer )?
+ alt111 = 2
+ LA111_0 = self.input.LA(1)
+
+ if (LA111_0 == 66) :
+ alt111 = 1
+ if alt111 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_synpred66784)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+ # C.g:297:13: ( 'EFIAPI' )?
+ alt112 = 2
+ LA112_0 = self.input.LA(1)
+
+ if (LA112_0 == 58) :
+ alt112 = 1
+ if alt112 == 1:
+ # C.g:297:14: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_synpred66788)
+ if self.failed:
+ return
+
+
+
+ # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
+ alt113 = 2
+ LA113_0 = self.input.LA(1)
+
+ if (LA113_0 == 59) :
+ alt113 = 1
+ if alt113 == 1:
+ # C.g:297:26: 'EFI_BOOTSERVICE'
+ self.match(self.input, 59, self.FOLLOW_59_in_synpred66793)
+ if self.failed:
+ return
+
+
+
+ # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
+ alt114 = 2
+ LA114_0 = self.input.LA(1)
+
+ if (LA114_0 == 60) :
+ alt114 = 1
+ if alt114 == 1:
+ # C.g:297:47: 'EFI_RUNTIMESERVICE'
+ self.match(self.input, 60, self.FOLLOW_60_in_synpred66798)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_direct_declarator_in_synpred66802)
+ self.direct_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred66
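+
+ # NOTE: synpred66 accepts an optional pointer, then the EDK2-specific
+ # calling-convention keywords 'EFIAPI' (58), 'EFI_BOOTSERVICE' (59) and
+ # 'EFI_RUNTIMESERVICE' (60), each optional, before a direct_declarator.
+ # These keywords are firmware-source extensions layered onto the stock C
+ # grammar.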
+
+
+
+ # $ANTLR start synpred67
+ def synpred67_fragment(self, ):
+ # C.g:303:15: ( declarator_suffix )
+ # C.g:303:15: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_synpred67821)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred67
+
+
+
+ # $ANTLR start synpred69
+ def synpred69_fragment(self, ):
+ # C.g:304:9: ( 'EFIAPI' )
+ # C.g:304:9: 'EFIAPI'
+ self.match(self.input, 58, self.FOLLOW_58_in_synpred69830)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred69
+
+
+
+ # $ANTLR start synpred70
+ def synpred70_fragment(self, ):
+ # C.g:304:35: ( declarator_suffix )
+ # C.g:304:35: declarator_suffix
+ self.following.append(self.FOLLOW_declarator_suffix_in_synpred70838)
+ self.declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred70
+
+
+
+ # $ANTLR start synpred73
+ def synpred73_fragment(self, ):
+ # C.g:310:9: ( '(' parameter_type_list ')' )
+ # C.g:310:9: '(' parameter_type_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred73878)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_parameter_type_list_in_synpred73880)
+ self.parameter_type_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred73882)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred73
+
+
+
+ # $ANTLR start synpred74
+ def synpred74_fragment(self, ):
+ # C.g:311:9: ( '(' identifier_list ')' )
+ # C.g:311:9: '(' identifier_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred74892)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_identifier_list_in_synpred74894)
+ self.identifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred74896)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred74
+
+
+
+ # $ANTLR start synpred75
+ def synpred75_fragment(self, ):
+ # C.g:316:8: ( type_qualifier )
+ # C.g:316:8: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred75921)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred75
+
+
+
+ # $ANTLR start synpred76
+ def synpred76_fragment(self, ):
+ # C.g:316:24: ( pointer )
+ # C.g:316:24: pointer
+ self.following.append(self.FOLLOW_pointer_in_synpred76924)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred76
+
+
+
+ # $ANTLR start synpred77
+ def synpred77_fragment(self, ):
+ # C.g:316:4: ( '*' ( type_qualifier )+ ( pointer )? )
+ # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
+ self.match(self.input, 66, self.FOLLOW_66_in_synpred77919)
+ if self.failed:
+ return
+ # C.g:316:8: ( type_qualifier )+
+ cnt116 = 0
+ while True: #loop116
+ alt116 = 2
+ LA116_0 = self.input.LA(1)
+
+ if ((49 <= LA116_0 <= 61)) :
+ alt116 = 1
+
+
+ if alt116 == 1:
+ # C.g:0:0: type_qualifier
+ self.following.append(self.FOLLOW_type_qualifier_in_synpred77921)
+ self.type_qualifier()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ if cnt116 >= 1:
+ break #loop116
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(116, self.input)
+ raise eee
+
+ cnt116 += 1
+
+
+ # C.g:316:24: ( pointer )?
+ alt117 = 2
+ LA117_0 = self.input.LA(1)
+
+ if (LA117_0 == 66) :
+ alt117 = 1
+ if alt117 == 1:
+ # C.g:0:0: pointer
+ self.following.append(self.FOLLOW_pointer_in_synpred77924)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ # $ANTLR end synpred77
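+
+ # NOTE: synpred77 and synpred78 split the two pointer spellings: '*'
+ # followed by at least one type_qualifier (tokens 49..61) and an optional
+ # nested pointer, versus '*' followed directly by another pointer. The
+ # (...)+ loop uses cnt116/EarlyExitException(116) to enforce the
+ # at-least-one-qualifier requirement.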
+
+
+
+ # $ANTLR start synpred78
+ def synpred78_fragment(self, ):
+ # C.g:317:4: ( '*' pointer )
+ # C.g:317:4: '*' pointer
+ self.match(self.input, 66, self.FOLLOW_66_in_synpred78930)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_pointer_in_synpred78932)
+ self.pointer()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred78
+
+
+
+ # $ANTLR start synpred81
+ def synpred81_fragment(self, ):
+ # C.g:326:32: ( 'OPTIONAL' )
+ # C.g:326:32: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_synpred81977)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred81
+
+
+
+ # $ANTLR start synpred82
+ def synpred82_fragment(self, ):
+ # C.g:326:27: ( ',' ( 'OPTIONAL' )? parameter_declaration )
+ # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
+ self.match(self.input, 27, self.FOLLOW_27_in_synpred82974)
+ if self.failed:
+ return
+ # C.g:326:31: ( 'OPTIONAL' )?
+ alt119 = 2
+ LA119_0 = self.input.LA(1)
+
+ if (LA119_0 == 53) :
+ LA119_1 = self.input.LA(2)
+
+ if (self.synpred81()) :
+ alt119 = 1
+ if alt119 == 1:
+ # C.g:326:32: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_synpred82977)
+ if self.failed:
+ return
+
+
+
+ self.following.append(self.FOLLOW_parameter_declaration_in_synpred82981)
+ self.parameter_declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred82
+
+
+
+ # $ANTLR start synpred83
+ def synpred83_fragment(self, ):
+ # C.g:330:28: ( declarator )
+ # C.g:330:28: declarator
+ self.following.append(self.FOLLOW_declarator_in_synpred83997)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred83
+
+
+
+ # $ANTLR start synpred84
+ def synpred84_fragment(self, ):
+ # C.g:330:39: ( abstract_declarator )
+ # C.g:330:39: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred84999)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred84
+
+
+
+ # $ANTLR start synpred86
+ def synpred86_fragment(self, ):
+ # C.g:330:4: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? )
+ # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred86994)
+ self.declaration_specifiers()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:330:27: ( declarator | abstract_declarator )*
+ while True: #loop120
+ alt120 = 3
+ LA120 = self.input.LA(1)
+ if LA120 == 66:
+ LA120_3 = self.input.LA(2)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == IDENTIFIER or LA120 == 58 or LA120 == 59 or LA120 == 60:
+ alt120 = 1
+ elif LA120 == 62:
+ LA120 = self.input.LA(2)
+ if LA120 == 29 or LA120 == 30 or LA120 == 31 or LA120 == 32 or LA120 == 33 or LA120 == 34 or LA120 == 35 or LA120 == 36 or LA120 == 37 or LA120 == 38 or LA120 == 39 or LA120 == 40 or LA120 == 41 or LA120 == 42 or LA120 == 45 or LA120 == 46 or LA120 == 48 or LA120 == 49 or LA120 == 50 or LA120 == 51 or LA120 == 52 or LA120 == 53 or LA120 == 54 or LA120 == 55 or LA120 == 56 or LA120 == 57 or LA120 == 61 or LA120 == 63 or LA120 == 64:
+ alt120 = 2
+ elif LA120 == 58:
+ LA120_21 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 66:
+ LA120_22 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 59:
+ LA120_23 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 60:
+ LA120_24 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == IDENTIFIER:
+ LA120_25 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+ elif LA120 == 62:
+ LA120_26 = self.input.LA(3)
+
+ if (self.synpred83()) :
+ alt120 = 1
+ elif (self.synpred84()) :
+ alt120 = 2
+
+
+
+ elif LA120 == 64:
+ alt120 = 2
+
+ if alt120 == 1:
+ # C.g:330:28: declarator
+ self.following.append(self.FOLLOW_declarator_in_synpred86997)
+ self.declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ elif alt120 == 2:
+ # C.g:330:39: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred86999)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ else:
+ break #loop120
+
+
+ # C.g:330:61: ( 'OPTIONAL' )?
+ alt121 = 2
+ LA121_0 = self.input.LA(1)
+
+ if (LA121_0 == 53) :
+ alt121 = 1
+ if alt121 == 1:
+ # C.g:330:62: 'OPTIONAL'
+ self.match(self.input, 53, self.FOLLOW_53_in_synpred861004)
+ if self.failed:
+ return
+
+
+
+
+
+ # $ANTLR end synpred86
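+
+ # NOTE: synpred86 covers the declaration_specifiers form of a
+ # parameter_declaration: the specifiers may be followed by any mix of
+ # declarators and abstract_declarators (loop120, disambiguated by
+ # synpred83/synpred84) and an optional trailing 'OPTIONAL' (token 53),
+ # another EDK2 extension.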
+
+
+
+ # $ANTLR start synpred90
+ def synpred90_fragment(self, ):
+ # C.g:341:4: ( specifier_qualifier_list ( abstract_declarator )? )
+ # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
+ self.following.append(self.FOLLOW_specifier_qualifier_list_in_synpred901046)
+ self.specifier_qualifier_list()
+ self.following.pop()
+ if self.failed:
+ return
+ # C.g:341:29: ( abstract_declarator )?
+ alt122 = 2
+ LA122_0 = self.input.LA(1)
+
+ if (LA122_0 == 62 or LA122_0 == 64 or LA122_0 == 66) :
+ alt122 = 1
+ if alt122 == 1:
+ # C.g:0:0: abstract_declarator
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred901048)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+
+
+
+ # $ANTLR end synpred90
+
+
+
+ # $ANTLR start synpred91
+ def synpred91_fragment(self, ):
+ # C.g:346:12: ( direct_abstract_declarator )
+ # C.g:346:12: direct_abstract_declarator
+ self.following.append(self.FOLLOW_direct_abstract_declarator_in_synpred911067)
+ self.direct_abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred91
+
+
+
+ # $ANTLR start synpred93
+ def synpred93_fragment(self, ):
+ # C.g:351:6: ( '(' abstract_declarator ')' )
+ # C.g:351:6: '(' abstract_declarator ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred931086)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_abstract_declarator_in_synpred931088)
+ self.abstract_declarator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred931090)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred93
+
+
+
+ # $ANTLR start synpred94
+ def synpred94_fragment(self, ):
+ # C.g:351:65: ( abstract_declarator_suffix )
+ # C.g:351:65: abstract_declarator_suffix
+ self.following.append(self.FOLLOW_abstract_declarator_suffix_in_synpred941098)
+ self.abstract_declarator_suffix()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred94
+
+
+
+ # $ANTLR start synpred109
+ def synpred109_fragment(self, ):
+ # C.g:386:4: ( '(' type_name ')' cast_expression )
+ # C.g:386:4: '(' type_name ')' cast_expression
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred1091282)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_type_name_in_synpred1091284)
+ self.type_name()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred1091286)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_cast_expression_in_synpred1091288)
+ self.cast_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred109
+
+
+
+ # $ANTLR start synpred114
+ def synpred114_fragment(self, ):
+ # C.g:395:4: ( 'sizeof' unary_expression )
+ # C.g:395:4: 'sizeof' unary_expression
+ self.match(self.input, 74, self.FOLLOW_74_in_synpred1141330)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_unary_expression_in_synpred1141332)
+ self.unary_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred114
+
+
+
+ # $ANTLR start synpred117
+ def synpred117_fragment(self, ):
+ # C.g:409:13: ( '(' argument_expression_list ')' )
+ # C.g:409:13: '(' argument_expression_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred1171420)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_argument_expression_list_in_synpred1171424)
+ self.argument_expression_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred1171428)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred117
+
+
+
+ # $ANTLR start synpred118
+ def synpred118_fragment(self, ):
+ # C.g:410:13: ( '(' macro_parameter_list ')' )
+ # C.g:410:13: '(' macro_parameter_list ')'
+ self.match(self.input, 62, self.FOLLOW_62_in_synpred1181444)
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1181446)
+ self.macro_parameter_list()
+ self.following.pop()
+ if self.failed:
+ return
+ self.match(self.input, 63, self.FOLLOW_63_in_synpred1181448)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred118
+
+
+
+ # $ANTLR start synpred120
+ def synpred120_fragment(self, ):
+ # C.g:412:13: ( '*' IDENTIFIER )
+ # C.g:412:13: '*' IDENTIFIER
+ self.match(self.input, 66, self.FOLLOW_66_in_synpred1201482)
+ if self.failed:
+ return
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1201486)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred120
+
+
+
+ # $ANTLR start synpred137
+ def synpred137_fragment(self, ):
+ # C.g:443:20: ( STRING_LITERAL )
+ # C.g:443:20: STRING_LITERAL
+ self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371683)
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred137
+
+
+
+ # $ANTLR start synpred138
+ def synpred138_fragment(self, ):
+ # C.g:443:8: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )
+ # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
+ # C.g:443:8: ( IDENTIFIER )*
+ while True: #loop125
+ alt125 = 2
+ LA125_0 = self.input.LA(1)
+
+ if (LA125_0 == IDENTIFIER) :
+ alt125 = 1
+
+
+ if alt125 == 1:
+ # C.g:0:0: IDENTIFIER
+ self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1381680)
+ if self.failed:
+ return
+
+
+ else:
+ break #loop125
+
+
+ # C.g:443:20: ( STRING_LITERAL )+
+ cnt126 = 0
+ while True: #loop126
+ alt126 = 2
+ LA126_0 = self.input.LA(1)
+
+ if (LA126_0 == STRING_LITERAL) :
+ alt126 = 1
+
+
+ if alt126 == 1:
+ # C.g:0:0: STRING_LITERAL
+ self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1381683)
+ if self.failed:
+ return
+
+
+ else:
+ if cnt126 >= 1:
+ break #loop126
+
+ if self.backtracking > 0:
+ self.failed = True
+ return
+
+ eee = EarlyExitException(126, self.input)
+ raise eee
+
+ cnt126 += 1
+
+
+
+
+ # $ANTLR end synpred138
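+
+ # NOTE: synpred138 matches the concatenated string form
+ # ( IDENTIFIER )* ( STRING_LITERAL )+ : loop125 skips any leading
+ # identifiers (e.g. macro-wrapped literals) and loop126 requires at least
+ # one string literal, failing with EarlyExitException(126) otherwise.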
+
+
+
+ # $ANTLR start synpred142
+ def synpred142_fragment(self, ):
+ # C.g:458:4: ( lvalue assignment_operator assignment_expression )
+ # C.g:458:4: lvalue assignment_operator assignment_expression
+ self.following.append(self.FOLLOW_lvalue_in_synpred1421744)
+ self.lvalue()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_operator_in_synpred1421746)
+ self.assignment_operator()
+ self.following.pop()
+ if self.failed:
+ return
+ self.following.append(self.FOLLOW_assignment_expression_in_synpred1421748)
+ self.assignment_expression()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred142
+
+
+
+ # $ANTLR start synpred169
+ def synpred169_fragment(self, ):
+ # C.g:520:4: ( expression_statement )
+ # C.g:520:4: expression_statement
+ self.following.append(self.FOLLOW_expression_statement_in_synpred1692035)
+ self.expression_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred169
+
+
+
+ # $ANTLR start synpred173
+ def synpred173_fragment(self, ):
+ # C.g:524:4: ( macro_statement )
+ # C.g:524:4: macro_statement
+ self.following.append(self.FOLLOW_macro_statement_in_synpred1732055)
+ self.macro_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred173
+
+
+
+ # $ANTLR start synpred174
+ def synpred174_fragment(self, ):
+ # C.g:525:4: ( asm2_statement )
+ # C.g:525:4: asm2_statement
+ self.following.append(self.FOLLOW_asm2_statement_in_synpred1742060)
+ self.asm2_statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred174
+
+
+
+ # $ANTLR start synpred181
+ def synpred181_fragment(self, ):
+ # C.g:544:19: ( declaration )
+ # C.g:544:19: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred1812166)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred181
+
+
+
+ # $ANTLR start synpred182
+ def synpred182_fragment(self, ):
+ # C.g:544:33: ( statement_list )
+ # C.g:544:33: statement_list
+ self.following.append(self.FOLLOW_statement_list_in_synpred1822170)
+ self.statement_list()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred182
+
+
+
+ # $ANTLR start synpred186
+ def synpred186_fragment(self, ):
+ # C.g:554:8: ( declaration )
+ # C.g:554:8: declaration
+ self.following.append(self.FOLLOW_declaration_in_synpred1862225)
+ self.declaration()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred186
+
+
+
+ # $ANTLR start synpred188
+ def synpred188_fragment(self, ):
+ # C.g:558:4: ( statement )
+ # C.g:558:4: statement
+ self.following.append(self.FOLLOW_statement_in_synpred1882242)
+ self.statement()
+ self.following.pop()
+ if self.failed:
+ return
+
+
+ # $ANTLR end synpred188
+
+
+
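+ # Each synpredNN() driver below runs its _fragment speculatively: it
+ # increments self.backtracking (while self.backtracking > 0, embedded
+ # actions are skipped and a mismatch sets self.failed instead of being
+ # reported), marks the input stream, executes the fragment, takes
+ # "not self.failed" as the verdict, then rewinds the stream and clears
+ # self.failed. The token position is therefore unchanged on return.
+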
+ def synpred69(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred69_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred81(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred81_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred82(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred82_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred66(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred66_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred83(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred83_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred84(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred84_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred67(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred67_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred86(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred86_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred120(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred120_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred40(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred40_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred142(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred142_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred182(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred182_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred109(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred109_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred181(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred181_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred186(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred186_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred188(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred188_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred169(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred169_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred117(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred117_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred70(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred70_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred118(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred118_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred34(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred34_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred33(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred33_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred94(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred94_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred39(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred39_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred74(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred74_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred114(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred114_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred93(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred93_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred75(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred75_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred137(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred137_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred90(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred90_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred138(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred138_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred91(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred91_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred73(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred73_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred5(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred5_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred78(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred78_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred7(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred7_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred76(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred76_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred77(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred77_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred2(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred2_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred4(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred4_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred174(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred174_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred173(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred173_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred14(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred14_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred15(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred15_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
+
+ def synpred10(self):
+ self.backtracking += 1
+ start = self.input.mark()
+ self.synpred10_fragment()
+ success = not self.failed
+ self.input.rewind(start)
+ self.backtracking -= 1
+ self.failed = False
+ return success
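+    # Editorial note (not generated output): every synpredNN method above
+    # follows the same ANTLR 3 backtracking template -- mark the input,
+    # speculatively run the corresponding *_fragment rule, record whether it
+    # failed, then rewind so no tokens are consumed. A hypothetical helper
+    # capturing the shared shape, for illustration only:
+    #
+    #     def _synpred(self, fragment):
+    #         self.backtracking += 1
+    #         start = self.input.mark()
+    #         fragment()                    # speculative parse of the fragment
+    #         success = not self.failed     # the fragment sets self.failed
+    #         self.input.rewind(start)      # undo any token consumption
+    #         self.backtracking -= 1
+    #         self.failed = False
+    #         return success
+    #
+    # so that, e.g., synpred10() would reduce to
+    # `return self._synpred(self.synpred10_fragment)`.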
+
+
+
+
+
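+    # Editorial note (not generated output): each FOLLOW_* constant below is
+    # an ANTLR 3 bitset, rendered here as a frozenset of token-type numbers
+    # (type 1 is EOF), naming the tokens that may legally follow a given
+    # element inside a rule. The antlr3 runtime passes these sets into match()
+    # so the parser can resynchronize after a mismatched token, conceptually:
+    #
+    #     self.match(self.input, 25, self.FOLLOW_25_in_declaration220)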
+ FOLLOW_external_declaration_in_translation_unit74 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_function_definition_in_external_declaration113 = frozenset([1])
+ FOLLOW_declaration_in_external_declaration118 = frozenset([1])
+ FOLLOW_macro_statement_in_external_declaration123 = frozenset([1, 25])
+ FOLLOW_25_in_external_declaration126 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_function_definition157 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_declarator_in_function_definition160 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_declaration_in_function_definition166 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_compound_statement_in_function_definition171 = frozenset([1])
+ FOLLOW_compound_statement_in_function_definition180 = frozenset([1])
+ FOLLOW_26_in_declaration203 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_declaration_specifiers_in_declaration207 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_init_declarator_list_in_declaration216 = frozenset([25])
+ FOLLOW_25_in_declaration220 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_declaration234 = frozenset([4, 25, 58, 59, 60, 62, 66])
+ FOLLOW_init_declarator_list_in_declaration238 = frozenset([25])
+ FOLLOW_25_in_declaration243 = frozenset([1])
+ FOLLOW_storage_class_specifier_in_declaration_specifiers264 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_specifier_in_declaration_specifiers272 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_qualifier_in_declaration_specifiers286 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_init_declarator_in_init_declarator_list308 = frozenset([1, 27])
+ FOLLOW_27_in_init_declarator_list311 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_init_declarator_in_init_declarator_list313 = frozenset([1, 27])
+ FOLLOW_declarator_in_init_declarator326 = frozenset([1, 28])
+ FOLLOW_28_in_init_declarator329 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_initializer_in_init_declarator331 = frozenset([1])
+ FOLLOW_set_in_storage_class_specifier0 = frozenset([1])
+ FOLLOW_34_in_type_specifier376 = frozenset([1])
+ FOLLOW_35_in_type_specifier381 = frozenset([1])
+ FOLLOW_36_in_type_specifier386 = frozenset([1])
+ FOLLOW_37_in_type_specifier391 = frozenset([1])
+ FOLLOW_38_in_type_specifier396 = frozenset([1])
+ FOLLOW_39_in_type_specifier401 = frozenset([1])
+ FOLLOW_40_in_type_specifier406 = frozenset([1])
+ FOLLOW_41_in_type_specifier411 = frozenset([1])
+ FOLLOW_42_in_type_specifier416 = frozenset([1])
+ FOLLOW_struct_or_union_specifier_in_type_specifier423 = frozenset([1])
+ FOLLOW_enum_specifier_in_type_specifier433 = frozenset([1])
+ FOLLOW_type_id_in_type_specifier451 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_type_id467 = frozenset([1])
+ FOLLOW_struct_or_union_in_struct_or_union_specifier494 = frozenset([4, 43])
+ FOLLOW_IDENTIFIER_in_struct_or_union_specifier496 = frozenset([43])
+ FOLLOW_43_in_struct_or_union_specifier499 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_struct_declaration_list_in_struct_or_union_specifier501 = frozenset([44])
+ FOLLOW_44_in_struct_or_union_specifier503 = frozenset([1])
+ FOLLOW_struct_or_union_in_struct_or_union_specifier508 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_struct_or_union_specifier510 = frozenset([1])
+ FOLLOW_set_in_struct_or_union0 = frozenset([1])
+ FOLLOW_struct_declaration_in_struct_declaration_list537 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_specifier_qualifier_list_in_struct_declaration549 = frozenset([4, 47, 58, 59, 60, 62, 66])
+ FOLLOW_struct_declarator_list_in_struct_declaration551 = frozenset([25])
+ FOLLOW_25_in_struct_declaration553 = frozenset([1])
+ FOLLOW_type_qualifier_in_specifier_qualifier_list566 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_specifier_in_specifier_qualifier_list570 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_struct_declarator_in_struct_declarator_list584 = frozenset([1, 27])
+ FOLLOW_27_in_struct_declarator_list587 = frozenset([4, 47, 58, 59, 60, 62, 66])
+ FOLLOW_struct_declarator_in_struct_declarator_list589 = frozenset([1, 27])
+ FOLLOW_declarator_in_struct_declarator602 = frozenset([1, 47])
+ FOLLOW_47_in_struct_declarator605 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_struct_declarator607 = frozenset([1])
+ FOLLOW_47_in_struct_declarator614 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_struct_declarator616 = frozenset([1])
+ FOLLOW_48_in_enum_specifier634 = frozenset([43])
+ FOLLOW_43_in_enum_specifier636 = frozenset([4])
+ FOLLOW_enumerator_list_in_enum_specifier638 = frozenset([27, 44])
+ FOLLOW_27_in_enum_specifier640 = frozenset([44])
+ FOLLOW_44_in_enum_specifier643 = frozenset([1])
+ FOLLOW_48_in_enum_specifier648 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_enum_specifier650 = frozenset([43])
+ FOLLOW_43_in_enum_specifier652 = frozenset([4])
+ FOLLOW_enumerator_list_in_enum_specifier654 = frozenset([27, 44])
+ FOLLOW_27_in_enum_specifier656 = frozenset([44])
+ FOLLOW_44_in_enum_specifier659 = frozenset([1])
+ FOLLOW_48_in_enum_specifier664 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_enum_specifier666 = frozenset([1])
+ FOLLOW_enumerator_in_enumerator_list677 = frozenset([1, 27])
+ FOLLOW_27_in_enumerator_list680 = frozenset([4])
+ FOLLOW_enumerator_in_enumerator_list682 = frozenset([1, 27])
+ FOLLOW_IDENTIFIER_in_enumerator695 = frozenset([1, 28])
+ FOLLOW_28_in_enumerator698 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_enumerator700 = frozenset([1])
+ FOLLOW_set_in_type_qualifier0 = frozenset([1])
+ FOLLOW_pointer_in_declarator784 = frozenset([4, 58, 59, 60, 62])
+ FOLLOW_58_in_declarator788 = frozenset([4, 59, 60, 62])
+ FOLLOW_59_in_declarator793 = frozenset([4, 60, 62])
+ FOLLOW_60_in_declarator798 = frozenset([4, 62])
+ FOLLOW_direct_declarator_in_declarator802 = frozenset([1])
+ FOLLOW_pointer_in_declarator808 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_direct_declarator819 = frozenset([1, 62, 64])
+ FOLLOW_declarator_suffix_in_direct_declarator821 = frozenset([1, 62, 64])
+ FOLLOW_62_in_direct_declarator827 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_58_in_direct_declarator830 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_declarator_in_direct_declarator834 = frozenset([63])
+ FOLLOW_63_in_direct_declarator836 = frozenset([62, 64])
+ FOLLOW_declarator_suffix_in_direct_declarator838 = frozenset([1, 62, 64])
+ FOLLOW_64_in_declarator_suffix852 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_declarator_suffix854 = frozenset([65])
+ FOLLOW_65_in_declarator_suffix856 = frozenset([1])
+ FOLLOW_64_in_declarator_suffix866 = frozenset([65])
+ FOLLOW_65_in_declarator_suffix868 = frozenset([1])
+ FOLLOW_62_in_declarator_suffix878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_type_list_in_declarator_suffix880 = frozenset([63])
+ FOLLOW_63_in_declarator_suffix882 = frozenset([1])
+ FOLLOW_62_in_declarator_suffix892 = frozenset([4])
+ FOLLOW_identifier_list_in_declarator_suffix894 = frozenset([63])
+ FOLLOW_63_in_declarator_suffix896 = frozenset([1])
+ FOLLOW_62_in_declarator_suffix906 = frozenset([63])
+ FOLLOW_63_in_declarator_suffix908 = frozenset([1])
+ FOLLOW_66_in_pointer919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_qualifier_in_pointer921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_pointer_in_pointer924 = frozenset([1])
+ FOLLOW_66_in_pointer930 = frozenset([66])
+ FOLLOW_pointer_in_pointer932 = frozenset([1])
+ FOLLOW_66_in_pointer937 = frozenset([1])
+ FOLLOW_parameter_list_in_parameter_type_list948 = frozenset([1, 27])
+ FOLLOW_27_in_parameter_type_list951 = frozenset([53, 67])
+ FOLLOW_53_in_parameter_type_list954 = frozenset([67])
+ FOLLOW_67_in_parameter_type_list958 = frozenset([1])
+ FOLLOW_parameter_declaration_in_parameter_list971 = frozenset([1, 27])
+ FOLLOW_27_in_parameter_list974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_53_in_parameter_list977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_declaration_in_parameter_list981 = frozenset([1, 27])
+ FOLLOW_declaration_specifiers_in_parameter_declaration994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_declarator_in_parameter_declaration997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_parameter_declaration999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_53_in_parameter_declaration1004 = frozenset([1])
+ FOLLOW_pointer_in_parameter_declaration1013 = frozenset([4, 66])
+ FOLLOW_IDENTIFIER_in_parameter_declaration1016 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_identifier_list1027 = frozenset([1, 27])
+ FOLLOW_27_in_identifier_list1031 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_identifier_list1033 = frozenset([1, 27])
+ FOLLOW_specifier_qualifier_list_in_type_name1046 = frozenset([1, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_type_name1048 = frozenset([1])
+ FOLLOW_type_id_in_type_name1054 = frozenset([1])
+ FOLLOW_pointer_in_abstract_declarator1065 = frozenset([1, 62, 64])
+ FOLLOW_direct_abstract_declarator_in_abstract_declarator1067 = frozenset([1])
+ FOLLOW_direct_abstract_declarator_in_abstract_declarator1073 = frozenset([1])
+ FOLLOW_62_in_direct_abstract_declarator1086 = frozenset([62, 64, 66])
+ FOLLOW_abstract_declarator_in_direct_abstract_declarator1088 = frozenset([63])
+ FOLLOW_63_in_direct_abstract_declarator1090 = frozenset([1, 62, 64])
+ FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094 = frozenset([1, 62, 64])
+ FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098 = frozenset([1, 62, 64])
+ FOLLOW_64_in_abstract_declarator_suffix1110 = frozenset([65])
+ FOLLOW_65_in_abstract_declarator_suffix1112 = frozenset([1])
+ FOLLOW_64_in_abstract_declarator_suffix1117 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_abstract_declarator_suffix1119 = frozenset([65])
+ FOLLOW_65_in_abstract_declarator_suffix1121 = frozenset([1])
+ FOLLOW_62_in_abstract_declarator_suffix1126 = frozenset([63])
+ FOLLOW_63_in_abstract_declarator_suffix1128 = frozenset([1])
+ FOLLOW_62_in_abstract_declarator_suffix1133 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135 = frozenset([63])
+ FOLLOW_63_in_abstract_declarator_suffix1137 = frozenset([1])
+ FOLLOW_assignment_expression_in_initializer1150 = frozenset([1])
+ FOLLOW_43_in_initializer1155 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_initializer_list_in_initializer1157 = frozenset([27, 44])
+ FOLLOW_27_in_initializer1159 = frozenset([44])
+ FOLLOW_44_in_initializer1162 = frozenset([1])
+ FOLLOW_initializer_in_initializer_list1173 = frozenset([1, 27])
+ FOLLOW_27_in_initializer_list1176 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_initializer_in_initializer_list1178 = frozenset([1, 27])
+ FOLLOW_assignment_expression_in_argument_expression_list1196 = frozenset([1, 27, 53])
+ FOLLOW_53_in_argument_expression_list1199 = frozenset([1, 27])
+ FOLLOW_27_in_argument_expression_list1204 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_argument_expression_list1206 = frozenset([1, 27, 53])
+ FOLLOW_53_in_argument_expression_list1209 = frozenset([1, 27])
+ FOLLOW_multiplicative_expression_in_additive_expression1225 = frozenset([1, 68, 69])
+ FOLLOW_68_in_additive_expression1229 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_multiplicative_expression_in_additive_expression1231 = frozenset([1, 68, 69])
+ FOLLOW_69_in_additive_expression1235 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_multiplicative_expression_in_additive_expression1237 = frozenset([1, 68, 69])
+ FOLLOW_cast_expression_in_multiplicative_expression1251 = frozenset([1, 66, 70, 71])
+ FOLLOW_66_in_multiplicative_expression1255 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_multiplicative_expression1257 = frozenset([1, 66, 70, 71])
+ FOLLOW_70_in_multiplicative_expression1261 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_multiplicative_expression1263 = frozenset([1, 66, 70, 71])
+ FOLLOW_71_in_multiplicative_expression1267 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_multiplicative_expression1269 = frozenset([1, 66, 70, 71])
+ FOLLOW_62_in_cast_expression1282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_name_in_cast_expression1284 = frozenset([63])
+ FOLLOW_63_in_cast_expression1286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_cast_expression1288 = frozenset([1])
+ FOLLOW_unary_expression_in_cast_expression1293 = frozenset([1])
+ FOLLOW_postfix_expression_in_unary_expression1304 = frozenset([1])
+ FOLLOW_72_in_unary_expression1309 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_unary_expression1311 = frozenset([1])
+ FOLLOW_73_in_unary_expression1316 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_unary_expression1318 = frozenset([1])
+ FOLLOW_unary_operator_in_unary_expression1323 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_unary_expression1325 = frozenset([1])
+ FOLLOW_74_in_unary_expression1330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_unary_expression1332 = frozenset([1])
+ FOLLOW_74_in_unary_expression1337 = frozenset([62])
+ FOLLOW_62_in_unary_expression1339 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_name_in_unary_expression1341 = frozenset([63])
+ FOLLOW_63_in_unary_expression1343 = frozenset([1])
+ FOLLOW_primary_expression_in_postfix_expression1367 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_64_in_postfix_expression1383 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_postfix_expression1385 = frozenset([65])
+ FOLLOW_65_in_postfix_expression1387 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_62_in_postfix_expression1401 = frozenset([63])
+ FOLLOW_63_in_postfix_expression1405 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_62_in_postfix_expression1420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_argument_expression_list_in_postfix_expression1424 = frozenset([63])
+ FOLLOW_63_in_postfix_expression1428 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_62_in_postfix_expression1444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_macro_parameter_list_in_postfix_expression1446 = frozenset([63])
+ FOLLOW_63_in_postfix_expression1448 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_75_in_postfix_expression1462 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_postfix_expression1466 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_66_in_postfix_expression1482 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_postfix_expression1486 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_76_in_postfix_expression1502 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_postfix_expression1506 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_72_in_postfix_expression1522 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_73_in_postfix_expression1536 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
+ FOLLOW_parameter_declaration_in_macro_parameter_list1559 = frozenset([1, 27])
+ FOLLOW_27_in_macro_parameter_list1562 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_declaration_in_macro_parameter_list1564 = frozenset([1, 27])
+ FOLLOW_set_in_unary_operator0 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_primary_expression1613 = frozenset([1])
+ FOLLOW_constant_in_primary_expression1618 = frozenset([1])
+ FOLLOW_62_in_primary_expression1623 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_primary_expression1625 = frozenset([63])
+ FOLLOW_63_in_primary_expression1627 = frozenset([1])
+ FOLLOW_HEX_LITERAL_in_constant1643 = frozenset([1])
+ FOLLOW_OCTAL_LITERAL_in_constant1653 = frozenset([1])
+ FOLLOW_DECIMAL_LITERAL_in_constant1663 = frozenset([1])
+ FOLLOW_CHARACTER_LITERAL_in_constant1671 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_constant1680 = frozenset([4, 9])
+ FOLLOW_STRING_LITERAL_in_constant1683 = frozenset([1, 4, 9])
+ FOLLOW_IDENTIFIER_in_constant1688 = frozenset([1, 4])
+ FOLLOW_FLOATING_POINT_LITERAL_in_constant1699 = frozenset([1])
+ FOLLOW_assignment_expression_in_expression1715 = frozenset([1, 27])
+ FOLLOW_27_in_expression1718 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_expression1720 = frozenset([1, 27])
+ FOLLOW_conditional_expression_in_constant_expression1733 = frozenset([1])
+ FOLLOW_lvalue_in_assignment_expression1744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
+ FOLLOW_assignment_operator_in_assignment_expression1746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_assignment_expression1748 = frozenset([1])
+ FOLLOW_conditional_expression_in_assignment_expression1753 = frozenset([1])
+ FOLLOW_unary_expression_in_lvalue1765 = frozenset([1])
+ FOLLOW_set_in_assignment_operator0 = frozenset([1])
+ FOLLOW_logical_or_expression_in_conditional_expression1839 = frozenset([1, 90])
+ FOLLOW_90_in_conditional_expression1842 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_conditional_expression1844 = frozenset([47])
+ FOLLOW_47_in_conditional_expression1846 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_conditional_expression_in_conditional_expression1848 = frozenset([1])
+ FOLLOW_logical_and_expression_in_logical_or_expression1863 = frozenset([1, 91])
+ FOLLOW_91_in_logical_or_expression1866 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_logical_and_expression_in_logical_or_expression1868 = frozenset([1, 91])
+ FOLLOW_inclusive_or_expression_in_logical_and_expression1881 = frozenset([1, 92])
+ FOLLOW_92_in_logical_and_expression1884 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_inclusive_or_expression_in_logical_and_expression1886 = frozenset([1, 92])
+ FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899 = frozenset([1, 93])
+ FOLLOW_93_in_inclusive_or_expression1902 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904 = frozenset([1, 93])
+ FOLLOW_and_expression_in_exclusive_or_expression1917 = frozenset([1, 94])
+ FOLLOW_94_in_exclusive_or_expression1920 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_and_expression_in_exclusive_or_expression1922 = frozenset([1, 94])
+ FOLLOW_equality_expression_in_and_expression1935 = frozenset([1, 77])
+ FOLLOW_77_in_and_expression1938 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_equality_expression_in_and_expression1940 = frozenset([1, 77])
+ FOLLOW_relational_expression_in_equality_expression1952 = frozenset([1, 95, 96])
+ FOLLOW_set_in_equality_expression1955 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_relational_expression_in_equality_expression1961 = frozenset([1, 95, 96])
+ FOLLOW_shift_expression_in_relational_expression1975 = frozenset([1, 97, 98, 99, 100])
+ FOLLOW_set_in_relational_expression1978 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_shift_expression_in_relational_expression1988 = frozenset([1, 97, 98, 99, 100])
+ FOLLOW_additive_expression_in_shift_expression2001 = frozenset([1, 101, 102])
+ FOLLOW_set_in_shift_expression2004 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_additive_expression_in_shift_expression2010 = frozenset([1, 101, 102])
+ FOLLOW_labeled_statement_in_statement2025 = frozenset([1])
+ FOLLOW_compound_statement_in_statement2030 = frozenset([1])
+ FOLLOW_expression_statement_in_statement2035 = frozenset([1])
+ FOLLOW_selection_statement_in_statement2040 = frozenset([1])
+ FOLLOW_iteration_statement_in_statement2045 = frozenset([1])
+ FOLLOW_jump_statement_in_statement2050 = frozenset([1])
+ FOLLOW_macro_statement_in_statement2055 = frozenset([1])
+ FOLLOW_asm2_statement_in_statement2060 = frozenset([1])
+ FOLLOW_asm1_statement_in_statement2065 = frozenset([1])
+ FOLLOW_asm_statement_in_statement2070 = frozenset([1])
+ FOLLOW_declaration_in_statement2075 = frozenset([1])
+ FOLLOW_103_in_asm2_statement2086 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_asm2_statement2089 = frozenset([62])
+ FOLLOW_62_in_asm2_statement2091 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_set_in_asm2_statement2094 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_63_in_asm2_statement2101 = frozenset([25])
+ FOLLOW_25_in_asm2_statement2103 = frozenset([1])
+ FOLLOW_104_in_asm1_statement2115 = frozenset([43])
+ FOLLOW_43_in_asm1_statement2117 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_set_in_asm1_statement2120 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_44_in_asm1_statement2127 = frozenset([1])
+ FOLLOW_105_in_asm_statement2138 = frozenset([43])
+ FOLLOW_43_in_asm_statement2140 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_set_in_asm_statement2143 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_44_in_asm_statement2150 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_macro_statement2162 = frozenset([62])
+ FOLLOW_62_in_macro_statement2164 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_declaration_in_macro_statement2166 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_list_in_macro_statement2170 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_macro_statement2173 = frozenset([63])
+ FOLLOW_63_in_macro_statement2176 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_labeled_statement2188 = frozenset([47])
+ FOLLOW_47_in_labeled_statement2190 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_labeled_statement2192 = frozenset([1])
+ FOLLOW_106_in_labeled_statement2197 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_constant_expression_in_labeled_statement2199 = frozenset([47])
+ FOLLOW_47_in_labeled_statement2201 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_labeled_statement2203 = frozenset([1])
+ FOLLOW_107_in_labeled_statement2208 = frozenset([47])
+ FOLLOW_47_in_labeled_statement2210 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_labeled_statement2212 = frozenset([1])
+ FOLLOW_43_in_compound_statement2223 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_declaration_in_compound_statement2225 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_list_in_compound_statement2228 = frozenset([44])
+ FOLLOW_44_in_compound_statement2231 = frozenset([1])
+ FOLLOW_statement_in_statement_list2242 = frozenset([1, 4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_25_in_expression_statement2254 = frozenset([1])
+ FOLLOW_expression_in_expression_statement2259 = frozenset([25])
+ FOLLOW_25_in_expression_statement2261 = frozenset([1])
+ FOLLOW_108_in_selection_statement2272 = frozenset([62])
+ FOLLOW_62_in_selection_statement2274 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_selection_statement2278 = frozenset([63])
+ FOLLOW_63_in_selection_statement2280 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_selection_statement2284 = frozenset([1, 109])
+ FOLLOW_109_in_selection_statement2299 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_selection_statement2301 = frozenset([1])
+ FOLLOW_110_in_selection_statement2308 = frozenset([62])
+ FOLLOW_62_in_selection_statement2310 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_selection_statement2312 = frozenset([63])
+ FOLLOW_63_in_selection_statement2314 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_selection_statement2316 = frozenset([1])
+ FOLLOW_111_in_iteration_statement2327 = frozenset([62])
+ FOLLOW_62_in_iteration_statement2329 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_iteration_statement2333 = frozenset([63])
+ FOLLOW_63_in_iteration_statement2335 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_iteration_statement2337 = frozenset([1])
+ FOLLOW_112_in_iteration_statement2344 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_iteration_statement2346 = frozenset([111])
+ FOLLOW_111_in_iteration_statement2348 = frozenset([62])
+ FOLLOW_62_in_iteration_statement2350 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_iteration_statement2354 = frozenset([63])
+ FOLLOW_63_in_iteration_statement2356 = frozenset([25])
+ FOLLOW_25_in_iteration_statement2358 = frozenset([1])
+ FOLLOW_113_in_iteration_statement2365 = frozenset([62])
+ FOLLOW_62_in_iteration_statement2367 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_statement_in_iteration_statement2369 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_statement_in_iteration_statement2373 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_iteration_statement2375 = frozenset([63])
+ FOLLOW_63_in_iteration_statement2378 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
+ FOLLOW_statement_in_iteration_statement2380 = frozenset([1])
+ FOLLOW_114_in_jump_statement2393 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_jump_statement2395 = frozenset([25])
+ FOLLOW_25_in_jump_statement2397 = frozenset([1])
+ FOLLOW_115_in_jump_statement2402 = frozenset([25])
+ FOLLOW_25_in_jump_statement2404 = frozenset([1])
+ FOLLOW_116_in_jump_statement2409 = frozenset([25])
+ FOLLOW_25_in_jump_statement2411 = frozenset([1])
+ FOLLOW_117_in_jump_statement2416 = frozenset([25])
+ FOLLOW_25_in_jump_statement2418 = frozenset([1])
+ FOLLOW_117_in_jump_statement2423 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_expression_in_jump_statement2425 = frozenset([25])
+ FOLLOW_25_in_jump_statement2427 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred2100 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred4100 = frozenset([4, 58, 59, 60, 62, 66])
+ FOLLOW_declarator_in_synpred4103 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_declaration_in_synpred4105 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_43_in_synpred4108 = frozenset([1])
+ FOLLOW_declaration_in_synpred5118 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred7157 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred10207 = frozenset([1])
+ FOLLOW_type_specifier_in_synpred14272 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred15286 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred33444 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_synpred34442 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_type_qualifier_in_synpred34444 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
+ FOLLOW_declarator_in_synpred34447 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred39566 = frozenset([1])
+ FOLLOW_type_specifier_in_synpred40570 = frozenset([1])
+ FOLLOW_pointer_in_synpred66784 = frozenset([4, 58, 59, 60, 62])
+ FOLLOW_58_in_synpred66788 = frozenset([4, 59, 60, 62])
+ FOLLOW_59_in_synpred66793 = frozenset([4, 60, 62])
+ FOLLOW_60_in_synpred66798 = frozenset([4, 62])
+ FOLLOW_direct_declarator_in_synpred66802 = frozenset([1])
+ FOLLOW_declarator_suffix_in_synpred67821 = frozenset([1])
+ FOLLOW_58_in_synpred69830 = frozenset([1])
+ FOLLOW_declarator_suffix_in_synpred70838 = frozenset([1])
+ FOLLOW_62_in_synpred73878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_type_list_in_synpred73880 = frozenset([63])
+ FOLLOW_63_in_synpred73882 = frozenset([1])
+ FOLLOW_62_in_synpred74892 = frozenset([4])
+ FOLLOW_identifier_list_in_synpred74894 = frozenset([63])
+ FOLLOW_63_in_synpred74896 = frozenset([1])
+ FOLLOW_type_qualifier_in_synpred75921 = frozenset([1])
+ FOLLOW_pointer_in_synpred76924 = frozenset([1])
+ FOLLOW_66_in_synpred77919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_qualifier_in_synpred77921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_pointer_in_synpred77924 = frozenset([1])
+ FOLLOW_66_in_synpred78930 = frozenset([66])
+ FOLLOW_pointer_in_synpred78932 = frozenset([1])
+ FOLLOW_53_in_synpred81977 = frozenset([1])
+ FOLLOW_27_in_synpred82974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_53_in_synpred82977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_parameter_declaration_in_synpred82981 = frozenset([1])
+ FOLLOW_declarator_in_synpred83997 = frozenset([1])
+ FOLLOW_abstract_declarator_in_synpred84999 = frozenset([1])
+ FOLLOW_declaration_specifiers_in_synpred86994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_declarator_in_synpred86997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_synpred86999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
+ FOLLOW_53_in_synpred861004 = frozenset([1])
+ FOLLOW_specifier_qualifier_list_in_synpred901046 = frozenset([1, 62, 64, 66])
+ FOLLOW_abstract_declarator_in_synpred901048 = frozenset([1])
+ FOLLOW_direct_abstract_declarator_in_synpred911067 = frozenset([1])
+ FOLLOW_62_in_synpred931086 = frozenset([62, 64, 66])
+ FOLLOW_abstract_declarator_in_synpred931088 = frozenset([63])
+ FOLLOW_63_in_synpred931090 = frozenset([1])
+ FOLLOW_abstract_declarator_suffix_in_synpred941098 = frozenset([1])
+ FOLLOW_62_in_synpred1091282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
+ FOLLOW_type_name_in_synpred1091284 = frozenset([63])
+ FOLLOW_63_in_synpred1091286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_cast_expression_in_synpred1091288 = frozenset([1])
+ FOLLOW_74_in_synpred1141330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_unary_expression_in_synpred1141332 = frozenset([1])
+ FOLLOW_62_in_synpred1171420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_argument_expression_list_in_synpred1171424 = frozenset([63])
+ FOLLOW_63_in_synpred1171428 = frozenset([1])
+ FOLLOW_62_in_synpred1181444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
+ FOLLOW_macro_parameter_list_in_synpred1181446 = frozenset([63])
+ FOLLOW_63_in_synpred1181448 = frozenset([1])
+ FOLLOW_66_in_synpred1201482 = frozenset([4])
+ FOLLOW_IDENTIFIER_in_synpred1201486 = frozenset([1])
+ FOLLOW_STRING_LITERAL_in_synpred1371683 = frozenset([1])
+ FOLLOW_IDENTIFIER_in_synpred1381680 = frozenset([4, 9])
+ FOLLOW_STRING_LITERAL_in_synpred1381683 = frozenset([1, 9])
+ FOLLOW_lvalue_in_synpred1421744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
+ FOLLOW_assignment_operator_in_synpred1421746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
+ FOLLOW_assignment_expression_in_synpred1421748 = frozenset([1])
+ FOLLOW_expression_statement_in_synpred1692035 = frozenset([1])
+ FOLLOW_macro_statement_in_synpred1732055 = frozenset([1])
+ FOLLOW_asm2_statement_in_synpred1742060 = frozenset([1])
+ FOLLOW_declaration_in_synpred1812166 = frozenset([1])
+ FOLLOW_statement_list_in_synpred1822170 = frozenset([1])
+ FOLLOW_declaration_in_synpred1862225 = frozenset([1])
+ FOLLOW_statement_in_synpred1882242 = frozenset([1])
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser3/__init__.py
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CLexer.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CLexer.py
new file mode 100755
index 00000000..6d5fc372
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CLexer.py
@@ -0,0 +1,627 @@
+# Generated from C.g4 by ANTLR 4.7.1
+from antlr4 import *
+from io import StringIO
+from typing import TextIO  # typing.io is deprecated/removed in newer Python
+import sys
+
+
+## @file
+# The file defines the lexer for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+#     java org.antlr.v4.Tool C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+
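+# Editorial note (not generated output): a minimal usage sketch, assuming the
+# standard antlr4-python3-runtime API that this file pulls in via `antlr4`:
+#
+#     from antlr4 import InputStream, CommonTokenStream
+#     lexer = CLexer(InputStream("int main(void) { return 0; }"))
+#     tokens = CommonTokenStream(lexer)
+#     tokens.fill()                 # drive the lexer over the whole input
+#     for tok in tokens.tokens:     # inspect the resulting token stream
+#         print(tok.type, tok.text)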
+
+def serializedATN():
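+    # Editorial note (not generated output): the write() calls below rebuild
+    # the serialized ATN (augmented transition network) as one long escaped
+    # string; the antlr4 runtime deserializes it into the lexer's state tables
+    # when the module is imported.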
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2k")
+ buf.write("\u0383\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
+ buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
+ buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
+ buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
+ buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
+ buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
+ buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
+ buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
+ buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
+ buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
+ buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
+ buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
+ buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
+ buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
+ buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
+ buf.write("p\tp\4q\tq\4r\tr\3\2\3\2\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3")
+ buf.write("\4\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7")
+ buf.write("\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\n\3")
+ buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13")
+ buf.write("\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\16")
+ buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\20\3\20")
+ buf.write("\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22")
+ buf.write("\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23")
+ buf.write("\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25")
+ buf.write("\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27")
+ buf.write("\3\27\3\27\3\27\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32")
+ buf.write("\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33")
+ buf.write("\3\33\3\33\3\33\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\36")
+ buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37")
+ buf.write("\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3!\3")
+ buf.write("!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3")
+ buf.write("\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"")
+ buf.write("\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#")
+ buf.write("\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3")
+ buf.write("%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3")
+ buf.write("&\3&\3&\3&\3&\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3")
+ buf.write(",\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\61")
+ buf.write("\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64")
+ buf.write("\3\64\3\65\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39\3")
+ buf.write("9\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3>\3?\3")
+ buf.write("?\3?\3?\3@\3@\3@\3A\3A\3A\3B\3B\3B\3C\3C\3D\3D\3D\3E\3")
+ buf.write("E\3E\3F\3F\3G\3G\3H\3H\3H\3I\3I\3I\3J\3J\3K\3K\3L\3L\3")
+ buf.write("L\3M\3M\3M\3N\3N\3N\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3")
+ buf.write("Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3T\3T\3")
+ buf.write("T\3T\3T\3T\3T\3T\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3")
+ buf.write("W\3W\3W\3X\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3")
+ buf.write("[\3[\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3")
+ buf.write("]\3]\3]\3]\3]\3^\3^\3^\7^\u02b2\n^\f^\16^\u02b5\13^\3")
+ buf.write("_\3_\3`\5`\u02ba\n`\3`\3`\3`\5`\u02bf\n`\3`\3`\3a\5a\u02c4")
+ buf.write("\na\3a\3a\3a\7a\u02c9\na\fa\16a\u02cc\13a\3a\3a\3b\3b")
+ buf.write("\3b\6b\u02d3\nb\rb\16b\u02d4\3b\5b\u02d8\nb\3c\3c\3c\7")
+ buf.write("c\u02dd\nc\fc\16c\u02e0\13c\5c\u02e2\nc\3c\5c\u02e5\n")
+ buf.write("c\3d\3d\6d\u02e9\nd\rd\16d\u02ea\3d\5d\u02ee\nd\3e\3e")
+ buf.write("\3f\3f\3f\3f\3f\3f\5f\u02f8\nf\3g\6g\u02fb\ng\rg\16g\u02fc")
+ buf.write("\3g\3g\7g\u0301\ng\fg\16g\u0304\13g\3g\5g\u0307\ng\3g")
+ buf.write("\5g\u030a\ng\3g\3g\6g\u030e\ng\rg\16g\u030f\3g\5g\u0313")
+ buf.write("\ng\3g\5g\u0316\ng\3g\6g\u0319\ng\rg\16g\u031a\3g\3g\5")
+ buf.write("g\u031f\ng\3g\6g\u0322\ng\rg\16g\u0323\3g\5g\u0327\ng")
+ buf.write("\3g\5g\u032a\ng\3h\3h\5h\u032e\nh\3h\6h\u0331\nh\rh\16")
+ buf.write("h\u0332\3i\3i\3j\3j\3j\5j\u033a\nj\3k\3k\3k\3k\3k\3k\3")
+ buf.write("k\3k\3k\5k\u0345\nk\3l\3l\3l\3l\3l\3l\3l\3m\3m\3m\3m\3")
+ buf.write("n\3n\3n\3n\3o\3o\3p\3p\3p\3p\7p\u035c\np\fp\16p\u035f")
+ buf.write("\13p\3p\3p\3p\3p\3p\3q\3q\3q\3q\7q\u036a\nq\fq\16q\u036d")
+ buf.write("\13q\3q\5q\u0370\nq\3q\3q\3q\3q\3r\3r\7r\u0378\nr\fr\16")
+ buf.write("r\u037b\13r\3r\5r\u037e\nr\3r\3r\3r\3r\3\u035d\2s\3\3")
+ buf.write("\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16")
+ buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61")
+ buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*")
+ buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w")
+ buf.write("=y>{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008b")
+ buf.write("G\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009b")
+ buf.write("O\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab")
+ buf.write("W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb")
+ buf.write("_\u00bd\2\u00bf`\u00c1a\u00c3b\u00c5c\u00c7d\u00c9\2\u00cb")
+ buf.write("\2\u00cde\u00cf\2\u00d1\2\u00d3\2\u00d5\2\u00d7\2\u00d9")
+ buf.write("f\u00dbg\u00ddh\u00dfi\u00e1j\u00e3k\3\2\20\6\2&&C\\a")
+ buf.write("ac|\4\2))^^\4\2$$^^\4\2ZZzz\5\2\62;CHch\6\2NNWWnnww\4")
+ buf.write("\2WWww\4\2NNnn\4\2GGgg\4\2--//\6\2FFHHffhh\t\2))^^ddh")
+ buf.write("hppttvv\5\2\13\f\16\17\"\"\4\2\f\f\17\17\2\u03a2\2\3\3")
+ buf.write("\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2")
+ buf.write("\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2")
+ buf.write("\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2")
+ buf.write("\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2")
+ buf.write("\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3")
+ buf.write("\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2")
+ buf.write("\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3")
+ buf.write("\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K")
+ buf.write("\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2")
+ buf.write("U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2")
+ buf.write("\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2")
+ buf.write("\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2")
+ buf.write("\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3")
+ buf.write("\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083")
+ buf.write("\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2")
+ buf.write("\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091")
+ buf.write("\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2")
+ buf.write("\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f")
+ buf.write("\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2")
+ buf.write("\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad")
+ buf.write("\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2")
+ buf.write("\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb")
+ buf.write("\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2")
+ buf.write("\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00cd\3\2\2\2\2\u00d9")
+ buf.write("\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2")
+ buf.write("\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\3\u00e5\3\2\2\2\5\u00e7")
+ buf.write("\3\2\2\2\7\u00e9\3\2\2\2\t\u00f1\3\2\2\2\13\u00f3\3\2")
+ buf.write("\2\2\r\u00f5\3\2\2\2\17\u00fc\3\2\2\2\21\u0103\3\2\2\2")
+ buf.write("\23\u0108\3\2\2\2\25\u0111\3\2\2\2\27\u0118\3\2\2\2\31")
+ buf.write("\u011d\3\2\2\2\33\u0122\3\2\2\2\35\u0128\3\2\2\2\37\u012c")
+ buf.write("\3\2\2\2!\u0131\3\2\2\2#\u0137\3\2\2\2%\u013e\3\2\2\2")
+ buf.write("\'\u0145\3\2\2\2)\u014e\3\2\2\2+\u0150\3\2\2\2-\u0157")
+ buf.write("\3\2\2\2/\u015d\3\2\2\2\61\u015f\3\2\2\2\63\u0164\3\2")
+ buf.write("\2\2\65\u016a\3\2\2\2\67\u0173\3\2\2\29\u0176\3\2\2\2")
+ buf.write(";\u017a\3\2\2\2=\u0183\3\2\2\2?\u0189\3\2\2\2A\u0193\3")
+ buf.write("\2\2\2C\u019c\3\2\2\2E\u01ba\3\2\2\2G\u01c1\3\2\2\2I\u01d1")
+ buf.write("\3\2\2\2K\u01e4\3\2\2\2M\u01eb\3\2\2\2O\u01ed\3\2\2\2")
+ buf.write("Q\u01ef\3\2\2\2S\u01f1\3\2\2\2U\u01f3\3\2\2\2W\u01f5\3")
+ buf.write("\2\2\2Y\u01f9\3\2\2\2[\u01fb\3\2\2\2]\u01fd\3\2\2\2_\u01ff")
+ buf.write("\3\2\2\2a\u0201\3\2\2\2c\u0204\3\2\2\2e\u0207\3\2\2\2")
+ buf.write("g\u020e\3\2\2\2i\u0210\3\2\2\2k\u0213\3\2\2\2m\u0215\3")
+ buf.write("\2\2\2o\u0217\3\2\2\2q\u0219\3\2\2\2s\u021c\3\2\2\2u\u021f")
+ buf.write("\3\2\2\2w\u0222\3\2\2\2y\u0225\3\2\2\2{\u0228\3\2\2\2")
+ buf.write("}\u022c\3\2\2\2\177\u0230\3\2\2\2\u0081\u0233\3\2\2\2")
+ buf.write("\u0083\u0236\3\2\2\2\u0085\u0239\3\2\2\2\u0087\u023b\3")
+ buf.write("\2\2\2\u0089\u023e\3\2\2\2\u008b\u0241\3\2\2\2\u008d\u0243")
+ buf.write("\3\2\2\2\u008f\u0245\3\2\2\2\u0091\u0248\3\2\2\2\u0093")
+ buf.write("\u024b\3\2\2\2\u0095\u024d\3\2\2\2\u0097\u024f\3\2\2\2")
+ buf.write("\u0099\u0252\3\2\2\2\u009b\u0255\3\2\2\2\u009d\u0258\3")
+ buf.write("\2\2\2\u009f\u025b\3\2\2\2\u00a1\u0263\3\2\2\2\u00a3\u0268")
+ buf.write("\3\2\2\2\u00a5\u026e\3\2\2\2\u00a7\u0273\3\2\2\2\u00a9")
+ buf.write("\u027b\3\2\2\2\u00ab\u027e\3\2\2\2\u00ad\u0283\3\2\2\2")
+ buf.write("\u00af\u028a\3\2\2\2\u00b1\u0290\3\2\2\2\u00b3\u0293\3")
+ buf.write("\2\2\2\u00b5\u0298\3\2\2\2\u00b7\u02a1\3\2\2\2\u00b9\u02a7")
+ buf.write("\3\2\2\2\u00bb\u02ae\3\2\2\2\u00bd\u02b6\3\2\2\2\u00bf")
+ buf.write("\u02b9\3\2\2\2\u00c1\u02c3\3\2\2\2\u00c3\u02cf\3\2\2\2")
+ buf.write("\u00c5\u02e1\3\2\2\2\u00c7\u02e6\3\2\2\2\u00c9\u02ef\3")
+ buf.write("\2\2\2\u00cb\u02f7\3\2\2\2\u00cd\u0329\3\2\2\2\u00cf\u032b")
+ buf.write("\3\2\2\2\u00d1\u0334\3\2\2\2\u00d3\u0339\3\2\2\2\u00d5")
+ buf.write("\u0344\3\2\2\2\u00d7\u0346\3\2\2\2\u00d9\u034d\3\2\2\2")
+ buf.write("\u00db\u0351\3\2\2\2\u00dd\u0355\3\2\2\2\u00df\u0357\3")
+ buf.write("\2\2\2\u00e1\u0365\3\2\2\2\u00e3\u0375\3\2\2\2\u00e5\u00e6")
+ buf.write("\7}\2\2\u00e6\4\3\2\2\2\u00e7\u00e8\7=\2\2\u00e8\6\3\2")
+ buf.write("\2\2\u00e9\u00ea\7v\2\2\u00ea\u00eb\7{\2\2\u00eb\u00ec")
+ buf.write("\7r\2\2\u00ec\u00ed\7g\2\2\u00ed\u00ee\7f\2\2\u00ee\u00ef")
+ buf.write("\7g\2\2\u00ef\u00f0\7h\2\2\u00f0\b\3\2\2\2\u00f1\u00f2")
+ buf.write("\7.\2\2\u00f2\n\3\2\2\2\u00f3\u00f4\7?\2\2\u00f4\f\3\2")
+ buf.write("\2\2\u00f5\u00f6\7g\2\2\u00f6\u00f7\7z\2\2\u00f7\u00f8")
+ buf.write("\7v\2\2\u00f8\u00f9\7g\2\2\u00f9\u00fa\7t\2\2\u00fa\u00fb")
+ buf.write("\7p\2\2\u00fb\16\3\2\2\2\u00fc\u00fd\7u\2\2\u00fd\u00fe")
+ buf.write("\7v\2\2\u00fe\u00ff\7c\2\2\u00ff\u0100\7v\2\2\u0100\u0101")
+ buf.write("\7k\2\2\u0101\u0102\7e\2\2\u0102\20\3\2\2\2\u0103\u0104")
+ buf.write("\7c\2\2\u0104\u0105\7w\2\2\u0105\u0106\7v\2\2\u0106\u0107")
+ buf.write("\7q\2\2\u0107\22\3\2\2\2\u0108\u0109\7t\2\2\u0109\u010a")
+ buf.write("\7g\2\2\u010a\u010b\7i\2\2\u010b\u010c\7k\2\2\u010c\u010d")
+ buf.write("\7u\2\2\u010d\u010e\7v\2\2\u010e\u010f\7g\2\2\u010f\u0110")
+ buf.write("\7t\2\2\u0110\24\3\2\2\2\u0111\u0112\7U\2\2\u0112\u0113")
+ buf.write("\7V\2\2\u0113\u0114\7C\2\2\u0114\u0115\7V\2\2\u0115\u0116")
+ buf.write("\7K\2\2\u0116\u0117\7E\2\2\u0117\26\3\2\2\2\u0118\u0119")
+ buf.write("\7x\2\2\u0119\u011a\7q\2\2\u011a\u011b\7k\2\2\u011b\u011c")
+ buf.write("\7f\2\2\u011c\30\3\2\2\2\u011d\u011e\7e\2\2\u011e\u011f")
+ buf.write("\7j\2\2\u011f\u0120\7c\2\2\u0120\u0121\7t\2\2\u0121\32")
+ buf.write("\3\2\2\2\u0122\u0123\7u\2\2\u0123\u0124\7j\2\2\u0124\u0125")
+ buf.write("\7q\2\2\u0125\u0126\7t\2\2\u0126\u0127\7v\2\2\u0127\34")
+ buf.write("\3\2\2\2\u0128\u0129\7k\2\2\u0129\u012a\7p\2\2\u012a\u012b")
+ buf.write("\7v\2\2\u012b\36\3\2\2\2\u012c\u012d\7n\2\2\u012d\u012e")
+ buf.write("\7q\2\2\u012e\u012f\7p\2\2\u012f\u0130\7i\2\2\u0130 \3")
+ buf.write("\2\2\2\u0131\u0132\7h\2\2\u0132\u0133\7n\2\2\u0133\u0134")
+ buf.write("\7q\2\2\u0134\u0135\7c\2\2\u0135\u0136\7v\2\2\u0136\"")
+ buf.write("\3\2\2\2\u0137\u0138\7f\2\2\u0138\u0139\7q\2\2\u0139\u013a")
+ buf.write("\7w\2\2\u013a\u013b\7d\2\2\u013b\u013c\7n\2\2\u013c\u013d")
+ buf.write("\7g\2\2\u013d$\3\2\2\2\u013e\u013f\7u\2\2\u013f\u0140")
+ buf.write("\7k\2\2\u0140\u0141\7i\2\2\u0141\u0142\7p\2\2\u0142\u0143")
+ buf.write("\7g\2\2\u0143\u0144\7f\2\2\u0144&\3\2\2\2\u0145\u0146")
+ buf.write("\7w\2\2\u0146\u0147\7p\2\2\u0147\u0148\7u\2\2\u0148\u0149")
+ buf.write("\7k\2\2\u0149\u014a\7i\2\2\u014a\u014b\7p\2\2\u014b\u014c")
+ buf.write("\7g\2\2\u014c\u014d\7f\2\2\u014d(\3\2\2\2\u014e\u014f")
+ buf.write("\7\177\2\2\u014f*\3\2\2\2\u0150\u0151\7u\2\2\u0151\u0152")
+ buf.write("\7v\2\2\u0152\u0153\7t\2\2\u0153\u0154\7w\2\2\u0154\u0155")
+ buf.write("\7e\2\2\u0155\u0156\7v\2\2\u0156,\3\2\2\2\u0157\u0158")
+ buf.write("\7w\2\2\u0158\u0159\7p\2\2\u0159\u015a\7k\2\2\u015a\u015b")
+ buf.write("\7q\2\2\u015b\u015c\7p\2\2\u015c.\3\2\2\2\u015d\u015e")
+ buf.write("\7<\2\2\u015e\60\3\2\2\2\u015f\u0160\7g\2\2\u0160\u0161")
+ buf.write("\7p\2\2\u0161\u0162\7w\2\2\u0162\u0163\7o\2\2\u0163\62")
+ buf.write("\3\2\2\2\u0164\u0165\7e\2\2\u0165\u0166\7q\2\2\u0166\u0167")
+ buf.write("\7p\2\2\u0167\u0168\7u\2\2\u0168\u0169\7v\2\2\u0169\64")
+ buf.write("\3\2\2\2\u016a\u016b\7x\2\2\u016b\u016c\7q\2\2\u016c\u016d")
+ buf.write("\7n\2\2\u016d\u016e\7c\2\2\u016e\u016f\7v\2\2\u016f\u0170")
+ buf.write("\7k\2\2\u0170\u0171\7n\2\2\u0171\u0172\7g\2\2\u0172\66")
+ buf.write("\3\2\2\2\u0173\u0174\7K\2\2\u0174\u0175\7P\2\2\u01758")
+ buf.write("\3\2\2\2\u0176\u0177\7Q\2\2\u0177\u0178\7W\2\2\u0178\u0179")
+ buf.write("\7V\2\2\u0179:\3\2\2\2\u017a\u017b\7Q\2\2\u017b\u017c")
+ buf.write("\7R\2\2\u017c\u017d\7V\2\2\u017d\u017e\7K\2\2\u017e\u017f")
+ buf.write("\7Q\2\2\u017f\u0180\7P\2\2\u0180\u0181\7C\2\2\u0181\u0182")
+ buf.write("\7N\2\2\u0182<\3\2\2\2\u0183\u0184\7E\2\2\u0184\u0185")
+ buf.write("\7Q\2\2\u0185\u0186\7P\2\2\u0186\u0187\7U\2\2\u0187\u0188")
+ buf.write("\7V\2\2\u0188>\3\2\2\2\u0189\u018a\7W\2\2\u018a\u018b")
+ buf.write("\7P\2\2\u018b\u018c\7C\2\2\u018c\u018d\7N\2\2\u018d\u018e")
+ buf.write("\7K\2\2\u018e\u018f\7I\2\2\u018f\u0190\7P\2\2\u0190\u0191")
+ buf.write("\7G\2\2\u0191\u0192\7F\2\2\u0192@\3\2\2\2\u0193\u0194")
+ buf.write("\7X\2\2\u0194\u0195\7Q\2\2\u0195\u0196\7N\2\2\u0196\u0197")
+ buf.write("\7C\2\2\u0197\u0198\7V\2\2\u0198\u0199\7K\2\2\u0199\u019a")
+ buf.write("\7N\2\2\u019a\u019b\7G\2\2\u019bB\3\2\2\2\u019c\u019d")
+ buf.write("\7I\2\2\u019d\u019e\7N\2\2\u019e\u019f\7Q\2\2\u019f\u01a0")
+ buf.write("\7D\2\2\u01a0\u01a1\7C\2\2\u01a1\u01a2\7N\2\2\u01a2\u01a3")
+ buf.write("\7a\2\2\u01a3\u01a4\7T\2\2\u01a4\u01a5\7G\2\2\u01a5\u01a6")
+ buf.write("\7O\2\2\u01a6\u01a7\7Q\2\2\u01a7\u01a8\7X\2\2\u01a8\u01a9")
+ buf.write("\7G\2\2\u01a9\u01aa\7a\2\2\u01aa\u01ab\7K\2\2\u01ab\u01ac")
+ buf.write("\7H\2\2\u01ac\u01ad\7a\2\2\u01ad\u01ae\7W\2\2\u01ae\u01af")
+ buf.write("\7P\2\2\u01af\u01b0\7T\2\2\u01b0\u01b1\7G\2\2\u01b1\u01b2")
+ buf.write("\7H\2\2\u01b2\u01b3\7G\2\2\u01b3\u01b4\7T\2\2\u01b4\u01b5")
+ buf.write("\7G\2\2\u01b5\u01b6\7P\2\2\u01b6\u01b7\7E\2\2\u01b7\u01b8")
+ buf.write("\7G\2\2\u01b8\u01b9\7F\2\2\u01b9D\3\2\2\2\u01ba\u01bb")
+ buf.write("\7G\2\2\u01bb\u01bc\7H\2\2\u01bc\u01bd\7K\2\2\u01bd\u01be")
+ buf.write("\7C\2\2\u01be\u01bf\7R\2\2\u01bf\u01c0\7K\2\2\u01c0F\3")
+ buf.write("\2\2\2\u01c1\u01c2\7G\2\2\u01c2\u01c3\7H\2\2\u01c3\u01c4")
+ buf.write("\7K\2\2\u01c4\u01c5\7a\2\2\u01c5\u01c6\7D\2\2\u01c6\u01c7")
+ buf.write("\7Q\2\2\u01c7\u01c8\7Q\2\2\u01c8\u01c9\7V\2\2\u01c9\u01ca")
+ buf.write("\7U\2\2\u01ca\u01cb\7G\2\2\u01cb\u01cc\7T\2\2\u01cc\u01cd")
+ buf.write("\7X\2\2\u01cd\u01ce\7K\2\2\u01ce\u01cf\7E\2\2\u01cf\u01d0")
+ buf.write("\7G\2\2\u01d0H\3\2\2\2\u01d1\u01d2\7G\2\2\u01d2\u01d3")
+ buf.write("\7H\2\2\u01d3\u01d4\7K\2\2\u01d4\u01d5\7a\2\2\u01d5\u01d6")
+ buf.write("\7T\2\2\u01d6\u01d7\7W\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9")
+ buf.write("\7V\2\2\u01d9\u01da\7K\2\2\u01da\u01db\7O\2\2\u01db\u01dc")
+ buf.write("\7G\2\2\u01dc\u01dd\7U\2\2\u01dd\u01de\7G\2\2\u01de\u01df")
+ buf.write("\7T\2\2\u01df\u01e0\7X\2\2\u01e0\u01e1\7K\2\2\u01e1\u01e2")
+ buf.write("\7E\2\2\u01e2\u01e3\7G\2\2\u01e3J\3\2\2\2\u01e4\u01e5")
+ buf.write("\7R\2\2\u01e5\u01e6\7C\2\2\u01e6\u01e7\7E\2\2\u01e7\u01e8")
+ buf.write("\7M\2\2\u01e8\u01e9\7G\2\2\u01e9\u01ea\7F\2\2\u01eaL\3")
+ buf.write("\2\2\2\u01eb\u01ec\7*\2\2\u01ecN\3\2\2\2\u01ed\u01ee\7")
+ buf.write("+\2\2\u01eeP\3\2\2\2\u01ef\u01f0\7]\2\2\u01f0R\3\2\2\2")
+ buf.write("\u01f1\u01f2\7_\2\2\u01f2T\3\2\2\2\u01f3\u01f4\7,\2\2")
+ buf.write("\u01f4V\3\2\2\2\u01f5\u01f6\7\60\2\2\u01f6\u01f7\7\60")
+ buf.write("\2\2\u01f7\u01f8\7\60\2\2\u01f8X\3\2\2\2\u01f9\u01fa\7")
+ buf.write("-\2\2\u01faZ\3\2\2\2\u01fb\u01fc\7/\2\2\u01fc\\\3\2\2")
+ buf.write("\2\u01fd\u01fe\7\61\2\2\u01fe^\3\2\2\2\u01ff\u0200\7\'")
+ buf.write("\2\2\u0200`\3\2\2\2\u0201\u0202\7-\2\2\u0202\u0203\7-")
+ buf.write("\2\2\u0203b\3\2\2\2\u0204\u0205\7/\2\2\u0205\u0206\7/")
+ buf.write("\2\2\u0206d\3\2\2\2\u0207\u0208\7u\2\2\u0208\u0209\7k")
+ buf.write("\2\2\u0209\u020a\7|\2\2\u020a\u020b\7g\2\2\u020b\u020c")
+ buf.write("\7q\2\2\u020c\u020d\7h\2\2\u020df\3\2\2\2\u020e\u020f")
+ buf.write("\7\60\2\2\u020fh\3\2\2\2\u0210\u0211\7/\2\2\u0211\u0212")
+ buf.write("\7@\2\2\u0212j\3\2\2\2\u0213\u0214\7(\2\2\u0214l\3\2\2")
+ buf.write("\2\u0215\u0216\7\u0080\2\2\u0216n\3\2\2\2\u0217\u0218")
+ buf.write("\7#\2\2\u0218p\3\2\2\2\u0219\u021a\7,\2\2\u021a\u021b")
+ buf.write("\7?\2\2\u021br\3\2\2\2\u021c\u021d\7\61\2\2\u021d\u021e")
+ buf.write("\7?\2\2\u021et\3\2\2\2\u021f\u0220\7\'\2\2\u0220\u0221")
+ buf.write("\7?\2\2\u0221v\3\2\2\2\u0222\u0223\7-\2\2\u0223\u0224")
+ buf.write("\7?\2\2\u0224x\3\2\2\2\u0225\u0226\7/\2\2\u0226\u0227")
+ buf.write("\7?\2\2\u0227z\3\2\2\2\u0228\u0229\7>\2\2\u0229\u022a")
+ buf.write("\7>\2\2\u022a\u022b\7?\2\2\u022b|\3\2\2\2\u022c\u022d")
+ buf.write("\7@\2\2\u022d\u022e\7@\2\2\u022e\u022f\7?\2\2\u022f~\3")
+ buf.write("\2\2\2\u0230\u0231\7(\2\2\u0231\u0232\7?\2\2\u0232\u0080")
+ buf.write("\3\2\2\2\u0233\u0234\7`\2\2\u0234\u0235\7?\2\2\u0235\u0082")
+ buf.write("\3\2\2\2\u0236\u0237\7~\2\2\u0237\u0238\7?\2\2\u0238\u0084")
+ buf.write("\3\2\2\2\u0239\u023a\7A\2\2\u023a\u0086\3\2\2\2\u023b")
+ buf.write("\u023c\7~\2\2\u023c\u023d\7~\2\2\u023d\u0088\3\2\2\2\u023e")
+ buf.write("\u023f\7(\2\2\u023f\u0240\7(\2\2\u0240\u008a\3\2\2\2\u0241")
+ buf.write("\u0242\7~\2\2\u0242\u008c\3\2\2\2\u0243\u0244\7`\2\2\u0244")
+ buf.write("\u008e\3\2\2\2\u0245\u0246\7?\2\2\u0246\u0247\7?\2\2\u0247")
+ buf.write("\u0090\3\2\2\2\u0248\u0249\7#\2\2\u0249\u024a\7?\2\2\u024a")
+ buf.write("\u0092\3\2\2\2\u024b\u024c\7>\2\2\u024c\u0094\3\2\2\2")
+ buf.write("\u024d\u024e\7@\2\2\u024e\u0096\3\2\2\2\u024f\u0250\7")
+ buf.write(">\2\2\u0250\u0251\7?\2\2\u0251\u0098\3\2\2\2\u0252\u0253")
+ buf.write("\7@\2\2\u0253\u0254\7?\2\2\u0254\u009a\3\2\2\2\u0255\u0256")
+ buf.write("\7>\2\2\u0256\u0257\7>\2\2\u0257\u009c\3\2\2\2\u0258\u0259")
+ buf.write("\7@\2\2\u0259\u025a\7@\2\2\u025a\u009e\3\2\2\2\u025b\u025c")
+ buf.write("\7a\2\2\u025c\u025d\7a\2\2\u025d\u025e\7c\2\2\u025e\u025f")
+ buf.write("\7u\2\2\u025f\u0260\7o\2\2\u0260\u0261\7a\2\2\u0261\u0262")
+ buf.write("\7a\2\2\u0262\u00a0\3\2\2\2\u0263\u0264\7a\2\2\u0264\u0265")
+ buf.write("\7c\2\2\u0265\u0266\7u\2\2\u0266\u0267\7o\2\2\u0267\u00a2")
+ buf.write("\3\2\2\2\u0268\u0269\7a\2\2\u0269\u026a\7a\2\2\u026a\u026b")
+ buf.write("\7c\2\2\u026b\u026c\7u\2\2\u026c\u026d\7o\2\2\u026d\u00a4")
+ buf.write("\3\2\2\2\u026e\u026f\7e\2\2\u026f\u0270\7c\2\2\u0270\u0271")
+ buf.write("\7u\2\2\u0271\u0272\7g\2\2\u0272\u00a6\3\2\2\2\u0273\u0274")
+ buf.write("\7f\2\2\u0274\u0275\7g\2\2\u0275\u0276\7h\2\2\u0276\u0277")
+ buf.write("\7c\2\2\u0277\u0278\7w\2\2\u0278\u0279\7n\2\2\u0279\u027a")
+ buf.write("\7v\2\2\u027a\u00a8\3\2\2\2\u027b\u027c\7k\2\2\u027c\u027d")
+ buf.write("\7h\2\2\u027d\u00aa\3\2\2\2\u027e\u027f\7g\2\2\u027f\u0280")
+ buf.write("\7n\2\2\u0280\u0281\7u\2\2\u0281\u0282\7g\2\2\u0282\u00ac")
+ buf.write("\3\2\2\2\u0283\u0284\7u\2\2\u0284\u0285\7y\2\2\u0285\u0286")
+ buf.write("\7k\2\2\u0286\u0287\7v\2\2\u0287\u0288\7e\2\2\u0288\u0289")
+ buf.write("\7j\2\2\u0289\u00ae\3\2\2\2\u028a\u028b\7y\2\2\u028b\u028c")
+ buf.write("\7j\2\2\u028c\u028d\7k\2\2\u028d\u028e\7n\2\2\u028e\u028f")
+ buf.write("\7g\2\2\u028f\u00b0\3\2\2\2\u0290\u0291\7f\2\2\u0291\u0292")
+ buf.write("\7q\2\2\u0292\u00b2\3\2\2\2\u0293\u0294\7i\2\2\u0294\u0295")
+ buf.write("\7q\2\2\u0295\u0296\7v\2\2\u0296\u0297\7q\2\2\u0297\u00b4")
+ buf.write("\3\2\2\2\u0298\u0299\7e\2\2\u0299\u029a\7q\2\2\u029a\u029b")
+ buf.write("\7p\2\2\u029b\u029c\7v\2\2\u029c\u029d\7k\2\2\u029d\u029e")
+ buf.write("\7p\2\2\u029e\u029f\7w\2\2\u029f\u02a0\7g\2\2\u02a0\u00b6")
+ buf.write("\3\2\2\2\u02a1\u02a2\7d\2\2\u02a2\u02a3\7t\2\2\u02a3\u02a4")
+ buf.write("\7g\2\2\u02a4\u02a5\7c\2\2\u02a5\u02a6\7m\2\2\u02a6\u00b8")
+ buf.write("\3\2\2\2\u02a7\u02a8\7t\2\2\u02a8\u02a9\7g\2\2\u02a9\u02aa")
+ buf.write("\7v\2\2\u02aa\u02ab\7w\2\2\u02ab\u02ac\7t\2\2\u02ac\u02ad")
+ buf.write("\7p\2\2\u02ad\u00ba\3\2\2\2\u02ae\u02b3\5\u00bd_\2\u02af")
+ buf.write("\u02b2\5\u00bd_\2\u02b0\u02b2\4\62;\2\u02b1\u02af\3\2")
+ buf.write("\2\2\u02b1\u02b0\3\2\2\2\u02b2\u02b5\3\2\2\2\u02b3\u02b1")
+ buf.write("\3\2\2\2\u02b3\u02b4\3\2\2\2\u02b4\u00bc\3\2\2\2\u02b5")
+ buf.write("\u02b3\3\2\2\2\u02b6\u02b7\t\2\2\2\u02b7\u00be\3\2\2\2")
+ buf.write("\u02b8\u02ba\7N\2\2\u02b9\u02b8\3\2\2\2\u02b9\u02ba\3")
+ buf.write("\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02be\7)\2\2\u02bc\u02bf")
+ buf.write("\5\u00d3j\2\u02bd\u02bf\n\3\2\2\u02be\u02bc\3\2\2\2\u02be")
+ buf.write("\u02bd\3\2\2\2\u02bf\u02c0\3\2\2\2\u02c0\u02c1\7)\2\2")
+ buf.write("\u02c1\u00c0\3\2\2\2\u02c2\u02c4\7N\2\2\u02c3\u02c2\3")
+ buf.write("\2\2\2\u02c3\u02c4\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5\u02ca")
+ buf.write("\7$\2\2\u02c6\u02c9\5\u00d3j\2\u02c7\u02c9\n\4\2\2\u02c8")
+ buf.write("\u02c6\3\2\2\2\u02c8\u02c7\3\2\2\2\u02c9\u02cc\3\2\2\2")
+ buf.write("\u02ca\u02c8\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02cd\3")
+ buf.write("\2\2\2\u02cc\u02ca\3\2\2\2\u02cd\u02ce\7$\2\2\u02ce\u00c2")
+ buf.write("\3\2\2\2\u02cf\u02d0\7\62\2\2\u02d0\u02d2\t\5\2\2\u02d1")
+ buf.write("\u02d3\5\u00c9e\2\u02d2\u02d1\3\2\2\2\u02d3\u02d4\3\2")
+ buf.write("\2\2\u02d4\u02d2\3\2\2\2\u02d4\u02d5\3\2\2\2\u02d5\u02d7")
+ buf.write("\3\2\2\2\u02d6\u02d8\5\u00cbf\2\u02d7\u02d6\3\2\2\2\u02d7")
+ buf.write("\u02d8\3\2\2\2\u02d8\u00c4\3\2\2\2\u02d9\u02e2\7\62\2")
+ buf.write("\2\u02da\u02de\4\63;\2\u02db\u02dd\4\62;\2\u02dc\u02db")
+ buf.write("\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de")
+ buf.write("\u02df\3\2\2\2\u02df\u02e2\3\2\2\2\u02e0\u02de\3\2\2\2")
+ buf.write("\u02e1\u02d9\3\2\2\2\u02e1\u02da\3\2\2\2\u02e2\u02e4\3")
+ buf.write("\2\2\2\u02e3\u02e5\5\u00cbf\2\u02e4\u02e3\3\2\2\2\u02e4")
+ buf.write("\u02e5\3\2\2\2\u02e5\u00c6\3\2\2\2\u02e6\u02e8\7\62\2")
+ buf.write("\2\u02e7\u02e9\4\629\2\u02e8\u02e7\3\2\2\2\u02e9\u02ea")
+ buf.write("\3\2\2\2\u02ea\u02e8\3\2\2\2\u02ea\u02eb\3\2\2\2\u02eb")
+ buf.write("\u02ed\3\2\2\2\u02ec\u02ee\5\u00cbf\2\u02ed\u02ec\3\2")
+ buf.write("\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00c8\3\2\2\2\u02ef\u02f0")
+ buf.write("\t\6\2\2\u02f0\u00ca\3\2\2\2\u02f1\u02f8\t\7\2\2\u02f2")
+ buf.write("\u02f3\t\b\2\2\u02f3\u02f8\t\t\2\2\u02f4\u02f5\t\b\2\2")
+ buf.write("\u02f5\u02f6\t\t\2\2\u02f6\u02f8\t\t\2\2\u02f7\u02f1\3")
+ buf.write("\2\2\2\u02f7\u02f2\3\2\2\2\u02f7\u02f4\3\2\2\2\u02f8\u00cc")
+ buf.write("\3\2\2\2\u02f9\u02fb\4\62;\2\u02fa\u02f9\3\2\2\2\u02fb")
+ buf.write("\u02fc\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc\u02fd\3\2\2\2")
+ buf.write("\u02fd\u02fe\3\2\2\2\u02fe\u0302\7\60\2\2\u02ff\u0301")
+ buf.write("\4\62;\2\u0300\u02ff\3\2\2\2\u0301\u0304\3\2\2\2\u0302")
+ buf.write("\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0306\3\2\2\2")
+ buf.write("\u0304\u0302\3\2\2\2\u0305\u0307\5\u00cfh\2\u0306\u0305")
+ buf.write("\3\2\2\2\u0306\u0307\3\2\2\2\u0307\u0309\3\2\2\2\u0308")
+ buf.write("\u030a\5\u00d1i\2\u0309\u0308\3\2\2\2\u0309\u030a\3\2")
+ buf.write("\2\2\u030a\u032a\3\2\2\2\u030b\u030d\7\60\2\2\u030c\u030e")
+ buf.write("\4\62;\2\u030d\u030c\3\2\2\2\u030e\u030f\3\2\2\2\u030f")
+ buf.write("\u030d\3\2\2\2\u030f\u0310\3\2\2\2\u0310\u0312\3\2\2\2")
+ buf.write("\u0311\u0313\5\u00cfh\2\u0312\u0311\3\2\2\2\u0312\u0313")
+ buf.write("\3\2\2\2\u0313\u0315\3\2\2\2\u0314\u0316\5\u00d1i\2\u0315")
+ buf.write("\u0314\3\2\2\2\u0315\u0316\3\2\2\2\u0316\u032a\3\2\2\2")
+ buf.write("\u0317\u0319\4\62;\2\u0318\u0317\3\2\2\2\u0319\u031a\3")
+ buf.write("\2\2\2\u031a\u0318\3\2\2\2\u031a\u031b\3\2\2\2\u031b\u031c")
+ buf.write("\3\2\2\2\u031c\u031e\5\u00cfh\2\u031d\u031f\5\u00d1i\2")
+ buf.write("\u031e\u031d\3\2\2\2\u031e\u031f\3\2\2\2\u031f\u032a\3")
+ buf.write("\2\2\2\u0320\u0322\4\62;\2\u0321\u0320\3\2\2\2\u0322\u0323")
+ buf.write("\3\2\2\2\u0323\u0321\3\2\2\2\u0323\u0324\3\2\2\2\u0324")
+ buf.write("\u0326\3\2\2\2\u0325\u0327\5\u00cfh\2\u0326\u0325\3\2")
+ buf.write("\2\2\u0326\u0327\3\2\2\2\u0327\u0328\3\2\2\2\u0328\u032a")
+ buf.write("\5\u00d1i\2\u0329\u02fa\3\2\2\2\u0329\u030b\3\2\2\2\u0329")
+ buf.write("\u0318\3\2\2\2\u0329\u0321\3\2\2\2\u032a\u00ce\3\2\2\2")
+ buf.write("\u032b\u032d\t\n\2\2\u032c\u032e\t\13\2\2\u032d\u032c")
+ buf.write("\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0330\3\2\2\2\u032f")
+ buf.write("\u0331\4\62;\2\u0330\u032f\3\2\2\2\u0331\u0332\3\2\2\2")
+ buf.write("\u0332\u0330\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u00d0\3")
+ buf.write("\2\2\2\u0334\u0335\t\f\2\2\u0335\u00d2\3\2\2\2\u0336\u0337")
+ buf.write("\7^\2\2\u0337\u033a\t\r\2\2\u0338\u033a\5\u00d5k\2\u0339")
+ buf.write("\u0336\3\2\2\2\u0339\u0338\3\2\2\2\u033a\u00d4\3\2\2\2")
+ buf.write("\u033b\u033c\7^\2\2\u033c\u033d\4\62\65\2\u033d\u033e")
+ buf.write("\4\629\2\u033e\u0345\4\629\2\u033f\u0340\7^\2\2\u0340")
+ buf.write("\u0341\4\629\2\u0341\u0345\4\629\2\u0342\u0343\7^\2\2")
+ buf.write("\u0343\u0345\4\629\2\u0344\u033b\3\2\2\2\u0344\u033f\3")
+ buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0345\u00d6\3\2\2\2\u0346\u0347")
+ buf.write("\7^\2\2\u0347\u0348\7w\2\2\u0348\u0349\5\u00c9e\2\u0349")
+ buf.write("\u034a\5\u00c9e\2\u034a\u034b\5\u00c9e\2\u034b\u034c\5")
+ buf.write("\u00c9e\2\u034c\u00d8\3\2\2\2\u034d\u034e\t\16\2\2\u034e")
+ buf.write("\u034f\3\2\2\2\u034f\u0350\bm\2\2\u0350\u00da\3\2\2\2")
+ buf.write("\u0351\u0352\7^\2\2\u0352\u0353\3\2\2\2\u0353\u0354\b")
+ buf.write("n\2\2\u0354\u00dc\3\2\2\2\u0355\u0356\4\5\0\2\u0356\u00de")
+ buf.write("\3\2\2\2\u0357\u0358\7\61\2\2\u0358\u0359\7,\2\2\u0359")
+ buf.write("\u035d\3\2\2\2\u035a\u035c\13\2\2\2\u035b\u035a\3\2\2")
+ buf.write("\2\u035c\u035f\3\2\2\2\u035d\u035e\3\2\2\2\u035d\u035b")
+ buf.write("\3\2\2\2\u035e\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360")
+ buf.write("\u0361\7,\2\2\u0361\u0362\7\61\2\2\u0362\u0363\3\2\2\2")
+ buf.write("\u0363\u0364\bp\2\2\u0364\u00e0\3\2\2\2\u0365\u0366\7")
+ buf.write("\61\2\2\u0366\u0367\7\61\2\2\u0367\u036b\3\2\2\2\u0368")
+ buf.write("\u036a\n\17\2\2\u0369\u0368\3\2\2\2\u036a\u036d\3\2\2")
+ buf.write("\2\u036b\u0369\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036f")
+ buf.write("\3\2\2\2\u036d\u036b\3\2\2\2\u036e\u0370\7\17\2\2\u036f")
+ buf.write("\u036e\3\2\2\2\u036f\u0370\3\2\2\2\u0370\u0371\3\2\2\2")
+ buf.write("\u0371\u0372\7\f\2\2\u0372\u0373\3\2\2\2\u0373\u0374\b")
+ buf.write("q\2\2\u0374\u00e2\3\2\2\2\u0375\u0379\7%\2\2\u0376\u0378")
+ buf.write("\n\17\2\2\u0377\u0376\3\2\2\2\u0378\u037b\3\2\2\2\u0379")
+ buf.write("\u0377\3\2\2\2\u0379\u037a\3\2\2\2\u037a\u037d\3\2\2\2")
+ buf.write("\u037b\u0379\3\2\2\2\u037c\u037e\7\17\2\2\u037d\u037c")
+ buf.write("\3\2\2\2\u037d\u037e\3\2\2\2\u037e\u037f\3\2\2\2\u037f")
+ buf.write("\u0380\7\f\2\2\u0380\u0381\3\2\2\2\u0381\u0382\br\2\2")
+ buf.write("\u0382\u00e4\3\2\2\2\'\2\u02b1\u02b3\u02b9\u02be\u02c3")
+ buf.write("\u02c8\u02ca\u02d4\u02d7\u02de\u02e1\u02e4\u02ea\u02ed")
+ buf.write("\u02f7\u02fc\u0302\u0306\u0309\u030f\u0312\u0315\u031a")
+ buf.write("\u031e\u0323\u0326\u0329\u032d\u0332\u0339\u0344\u035d")
+ buf.write("\u036b\u036f\u0379\u037d\3\2\3\2")
+ return buf.getvalue()
+
+
+class CLexer(Lexer):
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ T__0 = 1
+ T__1 = 2
+ T__2 = 3
+ T__3 = 4
+ T__4 = 5
+ T__5 = 6
+ T__6 = 7
+ T__7 = 8
+ T__8 = 9
+ T__9 = 10
+ T__10 = 11
+ T__11 = 12
+ T__12 = 13
+ T__13 = 14
+ T__14 = 15
+ T__15 = 16
+ T__16 = 17
+ T__17 = 18
+ T__18 = 19
+ T__19 = 20
+ T__20 = 21
+ T__21 = 22
+ T__22 = 23
+ T__23 = 24
+ T__24 = 25
+ T__25 = 26
+ T__26 = 27
+ T__27 = 28
+ T__28 = 29
+ T__29 = 30
+ T__30 = 31
+ T__31 = 32
+ T__32 = 33
+ T__33 = 34
+ T__34 = 35
+ T__35 = 36
+ T__36 = 37
+ T__37 = 38
+ T__38 = 39
+ T__39 = 40
+ T__40 = 41
+ T__41 = 42
+ T__42 = 43
+ T__43 = 44
+ T__44 = 45
+ T__45 = 46
+ T__46 = 47
+ T__47 = 48
+ T__48 = 49
+ T__49 = 50
+ T__50 = 51
+ T__51 = 52
+ T__52 = 53
+ T__53 = 54
+ T__54 = 55
+ T__55 = 56
+ T__56 = 57
+ T__57 = 58
+ T__58 = 59
+ T__59 = 60
+ T__60 = 61
+ T__61 = 62
+ T__62 = 63
+ T__63 = 64
+ T__64 = 65
+ T__65 = 66
+ T__66 = 67
+ T__67 = 68
+ T__68 = 69
+ T__69 = 70
+ T__70 = 71
+ T__71 = 72
+ T__72 = 73
+ T__73 = 74
+ T__74 = 75
+ T__75 = 76
+ T__76 = 77
+ T__77 = 78
+ T__78 = 79
+ T__79 = 80
+ T__80 = 81
+ T__81 = 82
+ T__82 = 83
+ T__83 = 84
+ T__84 = 85
+ T__85 = 86
+ T__86 = 87
+ T__87 = 88
+ T__88 = 89
+ T__89 = 90
+ T__90 = 91
+ T__91 = 92
+ IDENTIFIER = 93
+ CHARACTER_LITERAL = 94
+ STRING_LITERAL = 95
+ HEX_LITERAL = 96
+ DECIMAL_LITERAL = 97
+ OCTAL_LITERAL = 98
+ FLOATING_POINT_LITERAL = 99
+ WS = 100
+ BS = 101
+ UnicodeVocabulary = 102
+ COMMENT = 103
+ LINE_COMMENT = 104
+ LINE_COMMAND = 105
+
+ channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
+
+ modeNames = [ "DEFAULT_MODE" ]
+
+ literalNames = [ "<INVALID>",
+ "'{'", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
+ "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'",
+ "'int'", "'long'", "'float'", "'double'", "'signed'", "'unsigned'",
+ "'}'", "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'",
+ "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'",
+ "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'", "'EFI_BOOTSERVICE'",
+ "'EFI_RUNTIMESERVICE'", "'PACKED'", "'('", "')'", "'['", "']'",
+ "'*'", "'...'", "'+'", "'-'", "'/'", "'%'", "'++'", "'--'",
+ "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='", "'/='",
+ "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'",
+ "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'",
+ "'__asm'", "'case'", "'default'", "'if'", "'else'", "'switch'",
+ "'while'", "'do'", "'goto'", "'continue'", "'break'", "'return'" ]
+
+ symbolicNames = [ "<INVALID>",
+ "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL", "HEX_LITERAL",
+ "DECIMAL_LITERAL", "OCTAL_LITERAL", "FLOATING_POINT_LITERAL",
+ "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND" ]
+
+ ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
+ "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
+ "T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
+ "T__20", "T__21", "T__22", "T__23", "T__24", "T__25",
+ "T__26", "T__27", "T__28", "T__29", "T__30", "T__31",
+ "T__32", "T__33", "T__34", "T__35", "T__36", "T__37",
+ "T__38", "T__39", "T__40", "T__41", "T__42", "T__43",
+ "T__44", "T__45", "T__46", "T__47", "T__48", "T__49",
+ "T__50", "T__51", "T__52", "T__53", "T__54", "T__55",
+ "T__56", "T__57", "T__58", "T__59", "T__60", "T__61",
+ "T__62", "T__63", "T__64", "T__65", "T__66", "T__67",
+ "T__68", "T__69", "T__70", "T__71", "T__72", "T__73",
+ "T__74", "T__75", "T__76", "T__77", "T__78", "T__79",
+ "T__80", "T__81", "T__82", "T__83", "T__84", "T__85",
+ "T__86", "T__87", "T__88", "T__89", "T__90", "T__91",
+ "IDENTIFIER", "LETTER", "CHARACTER_LITERAL", "STRING_LITERAL",
+ "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL", "HexDigit",
+ "IntegerTypeSuffix", "FLOATING_POINT_LITERAL", "Exponent",
+ "FloatTypeSuffix", "EscapeSequence", "OctalEscape", "UnicodeEscape",
+ "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
+ "LINE_COMMAND" ]
+
+ grammarFileName = "C.g4"
+
+ # @param output= sys.stdout Type: TextIO
+ def __init__(self,input=None,output= sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.1")
+ self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
+ self._actions = None
+ self._predicates = None
+
+
+
+ def printTokenInfo(self,line,offset,tokenText):
+ print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
+
+ def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
+ def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+ def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
+ def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
+ def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
+ def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
+ def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
+
+
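The Store* helpers above are called from actions embedded in the C.g4 grammar: each wraps the matched source span in a CodeFragment record and appends it to a module-level list in Ecc.FileProfile, exactly as the append calls show. A minimal driver sketch, assuming the generated classes are importable as Eot.CParser4.CLexer/CParser and that translation_unit is the grammar's entry rule (package path and entry rule are assumptions here, named only for illustration):

    # Hypothetical driver: parse one C file and report what the embedded
    # grammar actions collected into the FileProfile lists.
    from antlr4 import InputStream, CommonTokenStream
    import Ecc.FileProfile as FileProfile
    from Eot.CParser4.CLexer import CLexer
    from Eot.CParser4.CParser import CParser

    def CollectFragments(FileName):
        with open(FileName) as f:
            TokenStream = CommonTokenStream(CLexer(InputStream(f.read())))
        CParser(TokenStream).translation_unit()   # actions fill FileProfile
        print('%d typedefs, %d function definitions' % (
            len(FileProfile.TypedefDefinitionList),
            len(FileProfile.FunctionDefinitionList)))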
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CListener.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CListener.py
new file mode 100755
index 00000000..4c624596
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CListener.py
@@ -0,0 +1,808 @@
+# Generated from C.g4 by ANTLR 4.7.1
+from antlr4 import *
+if __name__ is not None and "." in __name__:
+ from .CParser import CParser
+else:
+ from CParser import CParser
+
+## @file
+# The file defines the listener for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.v4.Tool C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+
+
+# This class defines a complete listener for a parse tree produced by CParser.
+class CListener(ParseTreeListener):
+
+ # Enter a parse tree produced by CParser#translation_unit.
+ # @param ctx Type: CParser.Translation_unitContext
+ def enterTranslation_unit(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#translation_unit.
+ # @param ctx Type: CParser.Translation_unitContext
+ def exitTranslation_unit(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#external_declaration.
+ # @param ctx Type: CParser.External_declarationContext
+ def enterExternal_declaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#external_declaration.
+ # @param ctx Type: CParser.External_declarationContext
+ def exitExternal_declaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#function_definition.
+ # @param ctx Type: CParser.Function_definitionContext
+ def enterFunction_definition(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#function_definition.
+ # @param ctx Type: CParser.Function_definitionContext
+ def exitFunction_definition(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declaration_specifiers.
+ # @param ctx Type: CParser.Declaration_specifiersContext
+ def enterDeclaration_specifiers(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declaration_specifiers.
+ # @param ctx Type: CParser.Declaration_specifiersContext
+ def exitDeclaration_specifiers(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declaration.
+ # @param ctx Type: CParser.DeclarationContext
+ def enterDeclaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declaration.
+ # @param ctx Type: CParser.DeclarationContext
+ def exitDeclaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#init_declarator_list.
+ # @param ctx Type: CParser.Init_declarator_listContext
+ def enterInit_declarator_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#init_declarator_list.
+ # @param ctx Type: CParser.Init_declarator_listContext
+ def exitInit_declarator_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#init_declarator.
+ # @param ctx Type: CParser.Init_declaratorContext
+ def enterInit_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#init_declarator.
+ # @param ctx Type: CParser.Init_declaratorContext
+ def exitInit_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#storage_class_specifier.
+ # @param ctx Type: CParser.Storage_class_specifierContext
+ def enterStorage_class_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#storage_class_specifier.
+ # @param ctx Type: CParser.Storage_class_specifierContext
+ def exitStorage_class_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_specifier.
+ # @param ctx Type: CParser.Type_specifierContext
+ def enterType_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_specifier.
+ # @param ctx Type: CParser.Type_specifierContext
+ def exitType_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_id.
+ # @param ctx Type: CParser.Type_idContext
+ def enterType_id(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_id.
+ # @param ctx Type: CParser.Type_idContext
+ def exitType_id(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_or_union_specifier.
+ # @param ctx Type: CParser.Struct_or_union_specifierContext
+ def enterStruct_or_union_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_or_union_specifier.
+ # @param ctx Type: CParser.Struct_or_union_specifierContext
+ def exitStruct_or_union_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_or_union.
+ # @param ctx Type: CParser.Struct_or_unionContext
+ def enterStruct_or_union(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_or_union.
+ # @param ctx Type: CParser.Struct_or_unionContext
+ def exitStruct_or_union(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declaration_list.
+ # @param ctx Type: CParser.Struct_declaration_listContext
+ def enterStruct_declaration_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declaration_list.
+ # @param ctx Type: CParser.Struct_declaration_listContext
+ def exitStruct_declaration_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declaration.
+ # @param ctx Type: CParser.Struct_declarationContext
+ def enterStruct_declaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declaration.
+ # @param ctx Type: CParser.Struct_declarationContext
+ def exitStruct_declaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#specifier_qualifier_list.
+ # @param ctx Type: CParser.Specifier_qualifier_listContext
+ def enterSpecifier_qualifier_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#specifier_qualifier_list.
+ # @param ctx Type: CParser.Specifier_qualifier_listContext
+ def exitSpecifier_qualifier_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declarator_list.
+ # @param ctx Type: CParser.Struct_declarator_listContext
+ def enterStruct_declarator_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declarator_list.
+ # @param ctx Type: CParser.Struct_declarator_listContext
+ def exitStruct_declarator_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#struct_declarator.
+ # @param ctx Type: CParser.Struct_declaratorContext
+ def enterStruct_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#struct_declarator.
+ # @param ctx Type: CParser.Struct_declaratorContext
+ def exitStruct_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#enum_specifier.
+ # @param ctx Type: CParser.Enum_specifierContext
+ def enterEnum_specifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#enum_specifier.
+ # @param ctx Type: CParser.Enum_specifierContext
+ def exitEnum_specifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#enumerator_list.
+ # @param ctx Type: CParser.Enumerator_listContext
+ def enterEnumerator_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#enumerator_list.
+ # @param ctx Type: CParser.Enumerator_listContext
+ def exitEnumerator_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#enumerator.
+ # @param ctx Type: CParser.EnumeratorContext
+ def enterEnumerator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#enumerator.
+ # @param ctx Type: CParser.EnumeratorContext
+ def exitEnumerator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_qualifier.
+ # @param ctx Type: CParser.Type_qualifierContext
+ def enterType_qualifier(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_qualifier.
+ # @param ctx Type: CParser.Type_qualifierContext
+ def exitType_qualifier(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declarator.
+ # @param ctx Type: CParser.DeclaratorContext
+ def enterDeclarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declarator.
+ # @param ctx Type: CParser.DeclaratorContext
+ def exitDeclarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#direct_declarator.
+ # @param ctx Type: CParser.Direct_declaratorContext
+ def enterDirect_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#direct_declarator.
+ # @param ctx Type: CParser.Direct_declaratorContext
+ def exitDirect_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#declarator_suffix.
+ # @param ctx Type: CParser.Declarator_suffixContext
+ def enterDeclarator_suffix(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#declarator_suffix.
+ # @param ctx Type: CParser.Declarator_suffixContext
+ def exitDeclarator_suffix(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#pointer.
+ # @param ctx Type: CParser.PointerContext
+ def enterPointer(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#pointer.
+ # @param ctx Type: CParser.PointerContext
+ def exitPointer(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#parameter_type_list.
+ # @param ctx Type: CParser.Parameter_type_listContext
+ def enterParameter_type_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#parameter_type_list.
+ # @param ctx Type: CParser.Parameter_type_listContext
+ def exitParameter_type_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#parameter_list.
+ # @param ctx Type: CParser.Parameter_listContext
+ def enterParameter_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#parameter_list.
+ # @param ctx Type: CParser.Parameter_listContext
+ def exitParameter_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#parameter_declaration.
+ # @param ctx Type: CParser.Parameter_declarationContext
+ def enterParameter_declaration(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#parameter_declaration.
+ # @param ctx Type: CParser.Parameter_declarationContext
+ def exitParameter_declaration(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#identifier_list.
+ # @param ctx Type: CParser.Identifier_listContext
+ def enterIdentifier_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#identifier_list.
+ # @param ctx Type: CParser.Identifier_listContext
+ def exitIdentifier_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#type_name.
+ # @param ctx Type: CParser.Type_nameContext
+ def enterType_name(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#type_name.
+ # @param ctx Type: CParser.Type_nameContext
+ def exitType_name(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#abstract_declarator.
+ # @param ctx Type: CParser.Abstract_declaratorContext
+ def enterAbstract_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#abstract_declarator.
+ # @param ctx Type: CParser.Abstract_declaratorContext
+ def exitAbstract_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#direct_abstract_declarator.
+ # @param ctx Type: CParser.Direct_abstract_declaratorContext
+ def enterDirect_abstract_declarator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#direct_abstract_declarator.
+ # @param ctx Type: CParser.Direct_abstract_declaratorContext
+ def exitDirect_abstract_declarator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#abstract_declarator_suffix.
+ # @param ctx Type: CParser.Abstract_declarator_suffixContext
+ def enterAbstract_declarator_suffix(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#abstract_declarator_suffix.
+ # @param ctx Type: CParser.Abstract_declarator_suffixContext
+ def exitAbstract_declarator_suffix(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#initializer.
+ # @param ctx Type: CParser.InitializerContext
+ def enterInitializer(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#initializer.
+ # @param ctx Type: CParser.InitializerContext
+ def exitInitializer(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#initializer_list.
+ # @param ctx Type: CParser.Initializer_listContext
+ def enterInitializer_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#initializer_list.
+ # @param ctx Type: CParser.Initializer_listContext
+ def exitInitializer_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#argument_expression_list.
+ # @param ctx Type: CParser.Argument_expression_listContext
+ def enterArgument_expression_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#argument_expression_list.
+ # @param ctx Type: CParser.Argument_expression_listContext
+ def exitArgument_expression_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#additive_expression.
+ # @param ctx Type: CParser.Additive_expressionContext
+ def enterAdditive_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#additive_expression.
+ # @param ctx Type: CParser.Additive_expressionContext
+ def exitAdditive_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#multiplicative_expression.
+ # @param ctx Type: CParser.Multiplicative_expressionContext
+ def enterMultiplicative_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#multiplicative_expression.
+ # @param ctx Type: CParser.Multiplicative_expressionContext
+ def exitMultiplicative_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#cast_expression.
+ # @param ctx Type: CParser.Cast_expressionContext
+ def enterCast_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#cast_expression.
+ # @param ctx Type: CParser.Cast_expressionContext
+ def exitCast_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#unary_expression.
+ # @param ctx Type: CParser.Unary_expressionContext
+ def enterUnary_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#unary_expression.
+ # @param ctx Type: CParser.Unary_expressionContext
+ def exitUnary_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#postfix_expression.
+ # @param ctx Type: CParser.Postfix_expressionContext
+ def enterPostfix_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#postfix_expression.
+ # @param ctx Type: CParser.Postfix_expressionContext
+ def exitPostfix_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#macro_parameter_list.
+ # @param ctx Type: CParser.Macro_parameter_listContext
+ def enterMacro_parameter_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#macro_parameter_list.
+ # @param ctx Type: CParser.Macro_parameter_listContext
+ def exitMacro_parameter_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#unary_operator.
+ # @param ctx Type: CParser.Unary_operatorContext
+ def enterUnary_operator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#unary_operator.
+ # @param ctx Type: CParser.Unary_operatorContext
+ def exitUnary_operator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#primary_expression.
+ # @param ctx Type: CParser.Primary_expressionContext
+ def enterPrimary_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#primary_expression.
+ # @param ctx Type: CParser.Primary_expressionContext
+ def exitPrimary_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#constant.
+ # @param ctx Type: CParser.ConstantContext
+ def enterConstant(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#constant.
+ # @param ctx Type: CParser.ConstantContext
+ def exitConstant(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#expression.
+ # @param ctx Type: CParser.ExpressionContext
+ def enterExpression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#expression.
+ # @param ctx Type: CParser.ExpressionContext
+ def exitExpression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#constant_expression.
+ # @param ctx Type: CParser.Constant_expressionContext
+ def enterConstant_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#constant_expression.
+ # @param ctx Type: CParser.Constant_expressionContext
+ def exitConstant_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#assignment_expression.
+ # @param ctx Type: CParser.Assignment_expressionContext
+ def enterAssignment_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#assignment_expression.
+ # @param ctx Type: CParser.Assignment_expressionContext
+ def exitAssignment_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#lvalue.
+ # @param ctx Type: CParser.LvalueContext
+ def enterLvalue(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#lvalue.
+ # @param ctx Type: CParser.LvalueContext
+ def exitLvalue(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#assignment_operator.
+ # @param ctx Type: CParser.Assignment_operatorContext
+ def enterAssignment_operator(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#assignment_operator.
+ # @param ctx Type: CParser.Assignment_operatorContext
+ def exitAssignment_operator(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#conditional_expression.
+ # @param ctx Type: CParser.Conditional_expressionContext
+ def enterConditional_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#conditional_expression.
+ # @param ctx Type: CParser.Conditional_expressionContext
+ def exitConditional_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#logical_or_expression.
+ # @param ctx Type: CParser.Logical_or_expressionContext
+ def enterLogical_or_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#logical_or_expression.
+ # @param ctx Type: CParser.Logical_or_expressionContext
+ def exitLogical_or_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#logical_and_expression.
+ # @param ctx Type: CParser.Logical_and_expressionContext
+ def enterLogical_and_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#logical_and_expression.
+ # @param ctx Type: CParser.Logical_and_expressionContext
+ def exitLogical_and_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#inclusive_or_expression.
+ # @param ctx Type: CParser.Inclusive_or_expressionContext
+ def enterInclusive_or_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#inclusive_or_expression.
+ # @param ctx Type: CParser.Inclusive_or_expressionContext
+ def exitInclusive_or_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#exclusive_or_expression.
+ # @param ctx Type: CParser.Exclusive_or_expressionContext
+ def enterExclusive_or_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#exclusive_or_expression.
+ # @param ctx Type: CParser.Exclusive_or_expressionContext
+ def exitExclusive_or_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#and_expression.
+ # @param ctx Type: CParser.And_expressionContext
+ def enterAnd_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#and_expression.
+ # @param ctx Type: CParser.And_expressionContext
+ def exitAnd_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#equality_expression.
+ # @param ctx Type: CParser.Equality_expressionContext
+ def enterEquality_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#equality_expression.
+ # @param ctx Type: CParser.Equality_expressionContext
+ def exitEquality_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#relational_expression.
+ # @param ctx Type: CParser.Relational_expressionContext
+ def enterRelational_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#relational_expression.
+ # @param ctx Type: CParser.Relational_expressionContext
+ def exitRelational_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#shift_expression.
+ # @param ctx Type: CParser.Shift_expressionContext
+ def enterShift_expression(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#shift_expression.
+ # @param ctx Type: CParser.Shift_expressionContext
+ def exitShift_expression(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#statement.
+ # @param ctx Type: CParser.StatementContext
+ def enterStatement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#statement.
+ # @param ctx Type: CParser.StatementContext
+ def exitStatement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#asm2_statement.
+ # @param ctx Type: CParser.Asm2_statementContext
+ def enterAsm2_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#asm2_statement.
+ # @param ctx Type: CParser.Asm2_statementContext
+ def exitAsm2_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#asm1_statement.
+ # @param ctx Type: CParser.Asm1_statementContext
+ def enterAsm1_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#asm1_statement.
+ # @param ctx Type: CParser.Asm1_statementContext
+ def exitAsm1_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#asm_statement.
+ # @param ctx Type: CParser.Asm_statementContext
+ def enterAsm_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#asm_statement.
+ # @param ctx Type: CParser.Asm_statementContext
+ def exitAsm_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#macro_statement.
+ # @param ctx Type: CParser.Macro_statementContext
+ def enterMacro_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#macro_statement.
+ # @param ctx Type: CParser.Macro_statementContext
+ def exitMacro_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#labeled_statement.
+ # @param ctx Type: CParser.Labeled_statementContext
+ def enterLabeled_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#labeled_statement.
+ # @param ctx Type: CParser.Labeled_statementContext
+ def exitLabeled_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#compound_statement.
+ # @param ctx Type: CParser.Compound_statementContext
+ def enterCompound_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#compound_statement.
+ # @param ctx Type: CParser.Compound_statementContext
+ def exitCompound_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#statement_list.
+ # @param ctx Type: CParser.Statement_listContext
+ def enterStatement_list(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#statement_list.
+ # @param ctx Type: CParser.Statement_listContext
+ def exitStatement_list(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#expression_statement.
+ # @param ctx Type: CParser.Expression_statementContext
+ def enterExpression_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#expression_statement.
+ # @param ctx Type: CParser.Expression_statementContext
+ def exitExpression_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#selection_statement.
+ # @param ctx Type: CParser.Selection_statementContext
+ def enterSelection_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#selection_statement.
+ # @param ctx Type: CParser.Selection_statementContext
+ def exitSelection_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#iteration_statement.
+ # @param ctx Type: CParser.Iteration_statementContext
+ def enterIteration_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#iteration_statement.
+ # @param ctx Type: CParser.Iteration_statementContext
+ def exitIteration_statement(self,ctx):
+ pass
+
+
+ # Enter a parse tree produced by CParser#jump_statement.
+ # @param ctx Type: CParser.Jump_statementContext
+ def enterJump_statement(self,ctx):
+ pass
+
+ # Exit a parse tree produced by CParser#jump_statement.
+ # @param ctx Type: CParser.Jump_statementContext
+ def exitJump_statement(self,ctx):
+ pass
+
+
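Every callback in the generated listener above is a no-op, so a consumer subclasses CListener, overrides only the rules it cares about, and drives the callbacks with the runtime's ParseTreeWalker. A short sketch under the same package-layout and entry-rule assumptions as before (FunctionCounter is a hypothetical subclass, not part of this commit):

    # Hypothetical listener subclass: count function definitions in a snippet.
    from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
    from Eot.CParser4.CLexer import CLexer
    from Eot.CParser4.CParser import CParser
    from Eot.CParser4.CListener import CListener

    class FunctionCounter(CListener):
        def __init__(self):
            self.Count = 0
        # Fired when the walker enters a CParser#function_definition subtree.
        def enterFunction_definition(self, ctx):
            self.Count += 1

    code = 'int f() { return 0; }'
    parser = CParser(CommonTokenStream(CLexer(InputStream(code))))
    tree = parser.translation_unit()           # entry rule, per the callbacks above
    counter = FunctionCounter()
    ParseTreeWalker.DEFAULT.walk(counter, tree)
    print(counter.Count)                       # expected: 1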
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CParser.py
new file mode 100755
index 00000000..d8d55759
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/CParser.py
@@ -0,0 +1,6273 @@
+# Generated from C.g4 by ANTLR 4.7.1
+# encoding: utf-8
+from antlr4 import *
+from io import StringIO
+from typing import TextIO
+import sys
+
+
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.v4.Tool C.g4
+#
+# Copyright (c) 2009 - 2010, Intel Corporation. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import Ecc.CodeFragment as CodeFragment
+import Ecc.FileProfile as FileProfile
+
+def serializedATN():
+ with StringIO() as buf:
+ buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3k")
+ buf.write("\u0380\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
+ buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
+ buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
+ buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
+ buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
+ buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
+ buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
+ buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
+ buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
+ buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
+ buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\3\2\7\2\u0092\n\2\f\2\16\2\u0095")
+ buf.write("\13\2\3\3\5\3\u0098\n\3\3\3\3\3\7\3\u009c\n\3\f\3\16\3")
+ buf.write("\u009f\13\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u00a7\n\3\5\3")
+ buf.write("\u00a9\n\3\3\4\5\4\u00ac\n\4\3\4\3\4\6\4\u00b0\n\4\r\4")
+ buf.write("\16\4\u00b1\3\4\3\4\3\4\5\4\u00b7\n\4\3\4\3\4\3\5\3\5")
+ buf.write("\3\5\6\5\u00be\n\5\r\5\16\5\u00bf\3\6\3\6\5\6\u00c4\n")
+ buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u00cc\n\6\3\6\3\6\3\6\5")
+ buf.write("\6\u00d1\n\6\3\7\3\7\3\7\7\7\u00d6\n\7\f\7\16\7\u00d9")
+ buf.write("\13\7\3\b\3\b\3\b\5\b\u00de\n\b\3\t\3\t\3\n\3\n\3\n\3")
+ buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n")
+ buf.write("\7\n\u00f3\n\n\f\n\16\n\u00f6\13\n\3\n\3\n\5\n\u00fa\n")
+ buf.write("\n\3\13\3\13\3\f\3\f\5\f\u0100\n\f\3\f\3\f\3\f\3\f\3\f")
+ buf.write("\3\f\3\f\5\f\u0109\n\f\3\r\3\r\3\16\6\16\u010e\n\16\r")
+ buf.write("\16\16\16\u010f\3\17\3\17\3\17\3\17\3\20\3\20\6\20\u0118")
+ buf.write("\n\20\r\20\16\20\u0119\3\21\3\21\3\21\7\21\u011f\n\21")
+ buf.write("\f\21\16\21\u0122\13\21\3\22\3\22\3\22\5\22\u0127\n\22")
+ buf.write("\3\22\3\22\5\22\u012b\n\22\3\23\3\23\3\23\3\23\5\23\u0131")
+ buf.write("\n\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u013a\n")
+ buf.write("\23\3\23\3\23\3\23\3\23\5\23\u0140\n\23\3\24\3\24\3\24")
+ buf.write("\7\24\u0145\n\24\f\24\16\24\u0148\13\24\3\25\3\25\3\25")
+ buf.write("\5\25\u014d\n\25\3\26\3\26\3\27\5\27\u0152\n\27\3\27\5")
+ buf.write("\27\u0155\n\27\3\27\5\27\u0158\n\27\3\27\5\27\u015b\n")
+ buf.write("\27\3\27\3\27\5\27\u015f\n\27\3\30\3\30\7\30\u0163\n\30")
+ buf.write("\f\30\16\30\u0166\13\30\3\30\3\30\5\30\u016a\n\30\3\30")
+ buf.write("\3\30\3\30\6\30\u016f\n\30\r\30\16\30\u0170\5\30\u0173")
+ buf.write("\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31")
+ buf.write("\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u0185\n\31\3\32\3")
+ buf.write("\32\6\32\u0189\n\32\r\32\16\32\u018a\3\32\5\32\u018e\n")
+ buf.write("\32\3\32\3\32\3\32\5\32\u0193\n\32\3\33\3\33\3\33\5\33")
+ buf.write("\u0198\n\33\3\33\5\33\u019b\n\33\3\34\3\34\3\34\5\34\u01a0")
+ buf.write("\n\34\3\34\7\34\u01a3\n\34\f\34\16\34\u01a6\13\34\3\35")
+ buf.write("\3\35\3\35\7\35\u01ab\n\35\f\35\16\35\u01ae\13\35\3\35")
+ buf.write("\5\35\u01b1\n\35\3\35\7\35\u01b4\n\35\f\35\16\35\u01b7")
+ buf.write("\13\35\3\35\5\35\u01ba\n\35\3\36\3\36\3\36\7\36\u01bf")
+ buf.write("\n\36\f\36\16\36\u01c2\13\36\3\37\3\37\5\37\u01c6\n\37")
+ buf.write("\3\37\5\37\u01c9\n\37\3 \3 \5 \u01cd\n \3 \5 \u01d0\n")
+ buf.write(" \3!\3!\3!\3!\3!\5!\u01d7\n!\3!\7!\u01da\n!\f!\16!\u01dd")
+ buf.write("\13!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5")
+ buf.write("\"\u01eb\n\"\3#\3#\3#\3#\5#\u01f1\n#\3#\3#\5#\u01f5\n")
+ buf.write("#\3$\3$\3$\7$\u01fa\n$\f$\16$\u01fd\13$\3%\3%\5%\u0201")
+ buf.write("\n%\3%\3%\3%\5%\u0206\n%\7%\u0208\n%\f%\16%\u020b\13%")
+ buf.write("\3&\3&\3&\3&\3&\7&\u0212\n&\f&\16&\u0215\13&\3\'\3\'\3")
+ buf.write("\'\3\'\3\'\3\'\3\'\7\'\u021e\n\'\f\'\16\'\u0221\13\'\3")
+ buf.write("(\3(\3(\3(\3(\3(\5(\u0229\n(\3)\3)\3)\3)\3)\3)\3)\3)\3")
+ buf.write(")\3)\3)\3)\3)\3)\3)\5)\u023a\n)\3*\3*\3*\3*\3*\3*\3*\3")
+ buf.write("*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3")
+ buf.write("*\3*\3*\3*\7*\u0259\n*\f*\16*\u025c\13*\3+\3+\3+\7+\u0261")
+ buf.write("\n+\f+\16+\u0264\13+\3,\3,\3-\3-\3-\3-\3-\3-\5-\u026e")
+ buf.write("\n-\3.\3.\3.\3.\3.\7.\u0275\n.\f.\16.\u0278\13.\3.\6.")
+ buf.write("\u027b\n.\r.\16.\u027c\6.\u027f\n.\r.\16.\u0280\3.\7.")
+ buf.write("\u0284\n.\f.\16.\u0287\13.\3.\5.\u028a\n.\3/\3/\3/\7/")
+ buf.write("\u028f\n/\f/\16/\u0292\13/\3\60\3\60\3\61\3\61\3\61\3")
+ buf.write("\61\3\61\5\61\u029b\n\61\3\62\3\62\3\63\3\63\3\64\3\64")
+ buf.write("\3\64\3\64\3\64\3\64\3\64\5\64\u02a8\n\64\3\65\3\65\3")
+ buf.write("\65\7\65\u02ad\n\65\f\65\16\65\u02b0\13\65\3\66\3\66\3")
+ buf.write("\66\7\66\u02b5\n\66\f\66\16\66\u02b8\13\66\3\67\3\67\3")
+ buf.write("\67\7\67\u02bd\n\67\f\67\16\67\u02c0\13\67\38\38\38\7")
+ buf.write("8\u02c5\n8\f8\168\u02c8\138\39\39\39\79\u02cd\n9\f9\16")
+ buf.write("9\u02d0\139\3:\3:\3:\7:\u02d5\n:\f:\16:\u02d8\13:\3;\3")
+ buf.write(";\3;\7;\u02dd\n;\f;\16;\u02e0\13;\3<\3<\3<\7<\u02e5\n")
+ buf.write("<\f<\16<\u02e8\13<\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\5")
+ buf.write("=\u02f5\n=\3>\5>\u02f8\n>\3>\3>\3>\7>\u02fd\n>\f>\16>")
+ buf.write("\u0300\13>\3>\3>\3>\3?\3?\3?\7?\u0308\n?\f?\16?\u030b")
+ buf.write("\13?\3?\3?\3@\3@\3@\7@\u0312\n@\f@\16@\u0315\13@\3@\3")
+ buf.write("@\3A\3A\3A\7A\u031c\nA\fA\16A\u031f\13A\3A\5A\u0322\n")
+ buf.write("A\3A\5A\u0325\nA\3A\3A\3B\3B\3B\3B\3B\3B\3B\3B\3B\3B\3")
+ buf.write("B\5B\u0334\nB\3C\3C\7C\u0338\nC\fC\16C\u033b\13C\3C\5")
+ buf.write("C\u033e\nC\3C\3C\3D\6D\u0343\nD\rD\16D\u0344\3E\3E\3E")
+ buf.write("\3E\5E\u034b\nE\3F\3F\3F\3F\3F\3F\3F\3F\5F\u0355\nF\3")
+ buf.write("F\3F\3F\3F\3F\3F\5F\u035d\nF\3G\3G\3G\3G\3G\3G\3G\3G\3")
+ buf.write("G\3G\3G\3G\3G\3G\3G\3G\5G\u036f\nG\3H\3H\3H\3H\3H\3H\3")
+ buf.write("H\3H\3H\3H\3H\3H\3H\5H\u037e\nH\3H\2\2I\2\4\6\b\n\f\16")
+ buf.write("\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDF")
+ buf.write("HJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
+ buf.write("\u0088\u008a\u008c\u008e\2\f\3\2\b\f\3\2\27\30\3\2\33")
+ buf.write("\'\5\2,,./\679\4\2\7\7:C\3\2IJ\3\2KN\3\2OP\3\2\4\4\3\2")
+ buf.write("\26\26\2\u03d8\2\u0093\3\2\2\2\4\u00a8\3\2\2\2\6\u00ab")
+ buf.write("\3\2\2\2\b\u00bd\3\2\2\2\n\u00d0\3\2\2\2\f\u00d2\3\2\2")
+ buf.write("\2\16\u00da\3\2\2\2\20\u00df\3\2\2\2\22\u00f9\3\2\2\2")
+ buf.write("\24\u00fb\3\2\2\2\26\u0108\3\2\2\2\30\u010a\3\2\2\2\32")
+ buf.write("\u010d\3\2\2\2\34\u0111\3\2\2\2\36\u0117\3\2\2\2 \u011b")
+ buf.write("\3\2\2\2\"\u012a\3\2\2\2$\u013f\3\2\2\2&\u0141\3\2\2\2")
+ buf.write("(\u0149\3\2\2\2*\u014e\3\2\2\2,\u015e\3\2\2\2.\u0172\3")
+ buf.write("\2\2\2\60\u0184\3\2\2\2\62\u0192\3\2\2\2\64\u0194\3\2")
+ buf.write("\2\2\66\u019c\3\2\2\28\u01b9\3\2\2\2:\u01bb\3\2\2\2<\u01c8")
+ buf.write("\3\2\2\2>\u01cf\3\2\2\2@\u01d6\3\2\2\2B\u01ea\3\2\2\2")
+ buf.write("D\u01f4\3\2\2\2F\u01f6\3\2\2\2H\u01fe\3\2\2\2J\u020c\3")
+ buf.write("\2\2\2L\u0216\3\2\2\2N\u0228\3\2\2\2P\u0239\3\2\2\2R\u023b")
+ buf.write("\3\2\2\2T\u025d\3\2\2\2V\u0265\3\2\2\2X\u026d\3\2\2\2")
+ buf.write("Z\u0289\3\2\2\2\\\u028b\3\2\2\2^\u0293\3\2\2\2`\u029a")
+ buf.write("\3\2\2\2b\u029c\3\2\2\2d\u029e\3\2\2\2f\u02a0\3\2\2\2")
+ buf.write("h\u02a9\3\2\2\2j\u02b1\3\2\2\2l\u02b9\3\2\2\2n\u02c1\3")
+ buf.write("\2\2\2p\u02c9\3\2\2\2r\u02d1\3\2\2\2t\u02d9\3\2\2\2v\u02e1")
+ buf.write("\3\2\2\2x\u02f4\3\2\2\2z\u02f7\3\2\2\2|\u0304\3\2\2\2")
+ buf.write("~\u030e\3\2\2\2\u0080\u0318\3\2\2\2\u0082\u0333\3\2\2")
+ buf.write("\2\u0084\u0335\3\2\2\2\u0086\u0342\3\2\2\2\u0088\u034a")
+ buf.write("\3\2\2\2\u008a\u035c\3\2\2\2\u008c\u036e\3\2\2\2\u008e")
+ buf.write("\u037d\3\2\2\2\u0090\u0092\5\4\3\2\u0091\u0090\3\2\2\2")
+ buf.write("\u0092\u0095\3\2\2\2\u0093\u0091\3\2\2\2\u0093\u0094\3")
+ buf.write("\2\2\2\u0094\3\3\2\2\2\u0095\u0093\3\2\2\2\u0096\u0098")
+ buf.write("\5\b\5\2\u0097\u0096\3\2\2\2\u0097\u0098\3\2\2\2\u0098")
+ buf.write("\u0099\3\2\2\2\u0099\u009d\5,\27\2\u009a\u009c\5\n\6\2")
+ buf.write("\u009b\u009a\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u009b\3")
+ buf.write("\2\2\2\u009d\u009e\3\2\2\2\u009e\u00a0\3\2\2\2\u009f\u009d")
+ buf.write("\3\2\2\2\u00a0\u00a1\7\3\2\2\u00a1\u00a9\3\2\2\2\u00a2")
+ buf.write("\u00a9\5\6\4\2\u00a3\u00a9\5\n\6\2\u00a4\u00a6\5\u0080")
+ buf.write("A\2\u00a5\u00a7\7\4\2\2\u00a6\u00a5\3\2\2\2\u00a6\u00a7")
+ buf.write("\3\2\2\2\u00a7\u00a9\3\2\2\2\u00a8\u0097\3\2\2\2\u00a8")
+ buf.write("\u00a2\3\2\2\2\u00a8\u00a3\3\2\2\2\u00a8\u00a4\3\2\2\2")
+ buf.write("\u00a9\5\3\2\2\2\u00aa\u00ac\5\b\5\2\u00ab\u00aa\3\2\2")
+ buf.write("\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b6")
+ buf.write("\5,\27\2\u00ae\u00b0\5\n\6\2\u00af\u00ae\3\2\2\2\u00b0")
+ buf.write("\u00b1\3\2\2\2\u00b1\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2")
+ buf.write("\u00b2\u00b3\3\2\2\2\u00b3\u00b4\5\u0084C\2\u00b4\u00b7")
+ buf.write("\3\2\2\2\u00b5\u00b7\5\u0084C\2\u00b6\u00af\3\2\2\2\u00b6")
+ buf.write("\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00b9\b\4\1\2")
+ buf.write("\u00b9\7\3\2\2\2\u00ba\u00be\5\20\t\2\u00bb\u00be\5\22")
+ buf.write("\n\2\u00bc\u00be\5*\26\2\u00bd\u00ba\3\2\2\2\u00bd\u00bb")
+ buf.write("\3\2\2\2\u00bd\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf")
+ buf.write("\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\t\3\2\2\2\u00c1")
+ buf.write("\u00c3\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2\2")
+ buf.write("\u00c3\u00c4\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c6\5")
+ buf.write("\f\7\2\u00c6\u00c7\7\4\2\2\u00c7\u00c8\b\6\1\2\u00c8\u00d1")
+ buf.write("\3\2\2\2\u00c9\u00cb\5\b\5\2\u00ca\u00cc\5\f\7\2\u00cb")
+ buf.write("\u00ca\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cd\3\2\2\2")
+ buf.write("\u00cd\u00ce\7\4\2\2\u00ce\u00cf\b\6\1\2\u00cf\u00d1\3")
+ buf.write("\2\2\2\u00d0\u00c1\3\2\2\2\u00d0\u00c9\3\2\2\2\u00d1\13")
+ buf.write("\3\2\2\2\u00d2\u00d7\5\16\b\2\u00d3\u00d4\7\6\2\2\u00d4")
+ buf.write("\u00d6\5\16\b\2\u00d5\u00d3\3\2\2\2\u00d6\u00d9\3\2\2")
+ buf.write("\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\r\3\2")
+ buf.write("\2\2\u00d9\u00d7\3\2\2\2\u00da\u00dd\5,\27\2\u00db\u00dc")
+ buf.write("\7\7\2\2\u00dc\u00de\5D#\2\u00dd\u00db\3\2\2\2\u00dd\u00de")
+ buf.write("\3\2\2\2\u00de\17\3\2\2\2\u00df\u00e0\t\2\2\2\u00e0\21")
+ buf.write("\3\2\2\2\u00e1\u00fa\7\r\2\2\u00e2\u00fa\7\16\2\2\u00e3")
+ buf.write("\u00fa\7\17\2\2\u00e4\u00fa\7\20\2\2\u00e5\u00fa\7\21")
+ buf.write("\2\2\u00e6\u00fa\7\22\2\2\u00e7\u00fa\7\23\2\2\u00e8\u00fa")
+ buf.write("\7\24\2\2\u00e9\u00fa\7\25\2\2\u00ea\u00eb\5\26\f\2\u00eb")
+ buf.write("\u00ec\b\n\1\2\u00ec\u00fa\3\2\2\2\u00ed\u00ee\5$\23\2")
+ buf.write("\u00ee\u00ef\b\n\1\2\u00ef\u00fa\3\2\2\2\u00f0\u00f4\7")
+ buf.write("_\2\2\u00f1\u00f3\5*\26\2\u00f2\u00f1\3\2\2\2\u00f3\u00f6")
+ buf.write("\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4\u00f5\3\2\2\2\u00f5")
+ buf.write("\u00f7\3\2\2\2\u00f6\u00f4\3\2\2\2\u00f7\u00fa\5,\27\2")
+ buf.write("\u00f8\u00fa\5\24\13\2\u00f9\u00e1\3\2\2\2\u00f9\u00e2")
+ buf.write("\3\2\2\2\u00f9\u00e3\3\2\2\2\u00f9\u00e4\3\2\2\2\u00f9")
+ buf.write("\u00e5\3\2\2\2\u00f9\u00e6\3\2\2\2\u00f9\u00e7\3\2\2\2")
+ buf.write("\u00f9\u00e8\3\2\2\2\u00f9\u00e9\3\2\2\2\u00f9\u00ea\3")
+ buf.write("\2\2\2\u00f9\u00ed\3\2\2\2\u00f9\u00f0\3\2\2\2\u00f9\u00f8")
+ buf.write("\3\2\2\2\u00fa\23\3\2\2\2\u00fb\u00fc\7_\2\2\u00fc\25")
+ buf.write("\3\2\2\2\u00fd\u00ff\5\30\r\2\u00fe\u0100\7_\2\2\u00ff")
+ buf.write("\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\u0101\3\2\2\2")
+ buf.write("\u0101\u0102\7\3\2\2\u0102\u0103\5\32\16\2\u0103\u0104")
+ buf.write("\7\26\2\2\u0104\u0109\3\2\2\2\u0105\u0106\5\30\r\2\u0106")
+ buf.write("\u0107\7_\2\2\u0107\u0109\3\2\2\2\u0108\u00fd\3\2\2\2")
+ buf.write("\u0108\u0105\3\2\2\2\u0109\27\3\2\2\2\u010a\u010b\t\3")
+ buf.write("\2\2\u010b\31\3\2\2\2\u010c\u010e\5\34\17\2\u010d\u010c")
+ buf.write("\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u010d\3\2\2\2\u010f")
+ buf.write("\u0110\3\2\2\2\u0110\33\3\2\2\2\u0111\u0112\5\36\20\2")
+ buf.write("\u0112\u0113\5 \21\2\u0113\u0114\7\4\2\2\u0114\35\3\2")
+ buf.write("\2\2\u0115\u0118\5*\26\2\u0116\u0118\5\22\n\2\u0117\u0115")
+ buf.write("\3\2\2\2\u0117\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119")
+ buf.write("\u0117\3\2\2\2\u0119\u011a\3\2\2\2\u011a\37\3\2\2\2\u011b")
+ buf.write("\u0120\5\"\22\2\u011c\u011d\7\6\2\2\u011d\u011f\5\"\22")
+ buf.write("\2\u011e\u011c\3\2\2\2\u011f\u0122\3\2\2\2\u0120\u011e")
+ buf.write("\3\2\2\2\u0120\u0121\3\2\2\2\u0121!\3\2\2\2\u0122\u0120")
+ buf.write("\3\2\2\2\u0123\u0126\5,\27\2\u0124\u0125\7\31\2\2\u0125")
+ buf.write("\u0127\5^\60\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2")
+ buf.write("\u0127\u012b\3\2\2\2\u0128\u0129\7\31\2\2\u0129\u012b")
+ buf.write("\5^\60\2\u012a\u0123\3\2\2\2\u012a\u0128\3\2\2\2\u012b")
+ buf.write("#\3\2\2\2\u012c\u012d\7\32\2\2\u012d\u012e\7\3\2\2\u012e")
+ buf.write("\u0130\5&\24\2\u012f\u0131\7\6\2\2\u0130\u012f\3\2\2\2")
+ buf.write("\u0130\u0131\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u0133\7")
+ buf.write("\26\2\2\u0133\u0140\3\2\2\2\u0134\u0135\7\32\2\2\u0135")
+ buf.write("\u0136\7_\2\2\u0136\u0137\7\3\2\2\u0137\u0139\5&\24\2")
+ buf.write("\u0138\u013a\7\6\2\2\u0139\u0138\3\2\2\2\u0139\u013a\3")
+ buf.write("\2\2\2\u013a\u013b\3\2\2\2\u013b\u013c\7\26\2\2\u013c")
+ buf.write("\u0140\3\2\2\2\u013d\u013e\7\32\2\2\u013e\u0140\7_\2\2")
+ buf.write("\u013f\u012c\3\2\2\2\u013f\u0134\3\2\2\2\u013f\u013d\3")
+ buf.write("\2\2\2\u0140%\3\2\2\2\u0141\u0146\5(\25\2\u0142\u0143")
+ buf.write("\7\6\2\2\u0143\u0145\5(\25\2\u0144\u0142\3\2\2\2\u0145")
+ buf.write("\u0148\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0147\3\2\2\2")
+ buf.write("\u0147\'\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u014c\7_\2")
+ buf.write("\2\u014a\u014b\7\7\2\2\u014b\u014d\5^\60\2\u014c\u014a")
+ buf.write("\3\2\2\2\u014c\u014d\3\2\2\2\u014d)\3\2\2\2\u014e\u014f")
+ buf.write("\t\4\2\2\u014f+\3\2\2\2\u0150\u0152\5\62\32\2\u0151\u0150")
+ buf.write("\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0154\3\2\2\2\u0153")
+ buf.write("\u0155\7$\2\2\u0154\u0153\3\2\2\2\u0154\u0155\3\2\2\2")
+ buf.write("\u0155\u0157\3\2\2\2\u0156\u0158\7%\2\2\u0157\u0156\3")
+ buf.write("\2\2\2\u0157\u0158\3\2\2\2\u0158\u015a\3\2\2\2\u0159\u015b")
+ buf.write("\7&\2\2\u015a\u0159\3\2\2\2\u015a\u015b\3\2\2\2\u015b")
+ buf.write("\u015c\3\2\2\2\u015c\u015f\5.\30\2\u015d\u015f\5\62\32")
+ buf.write("\2\u015e\u0151\3\2\2\2\u015e\u015d\3\2\2\2\u015f-\3\2")
+ buf.write("\2\2\u0160\u0164\7_\2\2\u0161\u0163\5\60\31\2\u0162\u0161")
+ buf.write("\3\2\2\2\u0163\u0166\3\2\2\2\u0164\u0162\3\2\2\2\u0164")
+ buf.write("\u0165\3\2\2\2\u0165\u0173\3\2\2\2\u0166\u0164\3\2\2\2")
+ buf.write("\u0167\u0169\7(\2\2\u0168\u016a\7$\2\2\u0169\u0168\3\2")
+ buf.write("\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u016c")
+ buf.write("\5,\27\2\u016c\u016e\7)\2\2\u016d\u016f\5\60\31\2\u016e")
+ buf.write("\u016d\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u016e\3\2\2\2")
+ buf.write("\u0170\u0171\3\2\2\2\u0171\u0173\3\2\2\2\u0172\u0160\3")
+ buf.write("\2\2\2\u0172\u0167\3\2\2\2\u0173/\3\2\2\2\u0174\u0175")
+ buf.write("\7*\2\2\u0175\u0176\5^\60\2\u0176\u0177\7+\2\2\u0177\u0185")
+ buf.write("\3\2\2\2\u0178\u0179\7*\2\2\u0179\u0185\7+\2\2\u017a\u017b")
+ buf.write("\7(\2\2\u017b\u017c\5\64\33\2\u017c\u017d\7)\2\2\u017d")
+ buf.write("\u0185\3\2\2\2\u017e\u017f\7(\2\2\u017f\u0180\5:\36\2")
+ buf.write("\u0180\u0181\7)\2\2\u0181\u0185\3\2\2\2\u0182\u0183\7")
+ buf.write("(\2\2\u0183\u0185\7)\2\2\u0184\u0174\3\2\2\2\u0184\u0178")
+ buf.write("\3\2\2\2\u0184\u017a\3\2\2\2\u0184\u017e\3\2\2\2\u0184")
+ buf.write("\u0182\3\2\2\2\u0185\61\3\2\2\2\u0186\u0188\7,\2\2\u0187")
+ buf.write("\u0189\5*\26\2\u0188\u0187\3\2\2\2\u0189\u018a\3\2\2\2")
+ buf.write("\u018a\u0188\3\2\2\2\u018a\u018b\3\2\2\2\u018b\u018d\3")
+ buf.write("\2\2\2\u018c\u018e\5\62\32\2\u018d\u018c\3\2\2\2\u018d")
+ buf.write("\u018e\3\2\2\2\u018e\u0193\3\2\2\2\u018f\u0190\7,\2\2")
+ buf.write("\u0190\u0193\5\62\32\2\u0191\u0193\7,\2\2\u0192\u0186")
+ buf.write("\3\2\2\2\u0192\u018f\3\2\2\2\u0192\u0191\3\2\2\2\u0193")
+ buf.write("\63\3\2\2\2\u0194\u019a\5\66\34\2\u0195\u0197\7\6\2\2")
+ buf.write("\u0196\u0198\7\37\2\2\u0197\u0196\3\2\2\2\u0197\u0198")
+ buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\7-\2\2\u019a")
+ buf.write("\u0195\3\2\2\2\u019a\u019b\3\2\2\2\u019b\65\3\2\2\2\u019c")
+ buf.write("\u01a4\58\35\2\u019d\u019f\7\6\2\2\u019e\u01a0\7\37\2")
+ buf.write("\2\u019f\u019e\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a1")
+ buf.write("\3\2\2\2\u01a1\u01a3\58\35\2\u01a2\u019d\3\2\2\2\u01a3")
+ buf.write("\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2")
+ buf.write("\u01a5\67\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u01ac\5\b")
+ buf.write("\5\2\u01a8\u01ab\5,\27\2\u01a9\u01ab\5> \2\u01aa\u01a8")
+ buf.write("\3\2\2\2\u01aa\u01a9\3\2\2\2\u01ab\u01ae\3\2\2\2\u01ac")
+ buf.write("\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01b0\3\2\2\2")
+ buf.write("\u01ae\u01ac\3\2\2\2\u01af\u01b1\7\37\2\2\u01b0\u01af")
+ buf.write("\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01ba\3\2\2\2\u01b2")
+ buf.write("\u01b4\5\62\32\2\u01b3\u01b2\3\2\2\2\u01b4\u01b7\3\2\2")
+ buf.write("\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8")
+ buf.write("\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01ba\7_\2\2\u01b9")
+ buf.write("\u01a7\3\2\2\2\u01b9\u01b5\3\2\2\2\u01ba9\3\2\2\2\u01bb")
+ buf.write("\u01c0\7_\2\2\u01bc\u01bd\7\6\2\2\u01bd\u01bf\7_\2\2\u01be")
+ buf.write("\u01bc\3\2\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01be\3\2\2\2")
+ buf.write("\u01c0\u01c1\3\2\2\2\u01c1;\3\2\2\2\u01c2\u01c0\3\2\2")
+ buf.write("\2\u01c3\u01c5\5\36\20\2\u01c4\u01c6\5> \2\u01c5\u01c4")
+ buf.write("\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c9\3\2\2\2\u01c7")
+ buf.write("\u01c9\5\24\13\2\u01c8\u01c3\3\2\2\2\u01c8\u01c7\3\2\2")
+ buf.write("\2\u01c9=\3\2\2\2\u01ca\u01cc\5\62\32\2\u01cb\u01cd\5")
+ buf.write("@!\2\u01cc\u01cb\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01d0")
+ buf.write("\3\2\2\2\u01ce\u01d0\5@!\2\u01cf\u01ca\3\2\2\2\u01cf\u01ce")
+ buf.write("\3\2\2\2\u01d0?\3\2\2\2\u01d1\u01d2\7(\2\2\u01d2\u01d3")
+ buf.write("\5> \2\u01d3\u01d4\7)\2\2\u01d4\u01d7\3\2\2\2\u01d5\u01d7")
+ buf.write("\5B\"\2\u01d6\u01d1\3\2\2\2\u01d6\u01d5\3\2\2\2\u01d7")
+ buf.write("\u01db\3\2\2\2\u01d8\u01da\5B\"\2\u01d9\u01d8\3\2\2\2")
+ buf.write("\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01db\u01dc\3")
+ buf.write("\2\2\2\u01dcA\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01df")
+ buf.write("\7*\2\2\u01df\u01eb\7+\2\2\u01e0\u01e1\7*\2\2\u01e1\u01e2")
+ buf.write("\5^\60\2\u01e2\u01e3\7+\2\2\u01e3\u01eb\3\2\2\2\u01e4")
+ buf.write("\u01e5\7(\2\2\u01e5\u01eb\7)\2\2\u01e6\u01e7\7(\2\2\u01e7")
+ buf.write("\u01e8\5\64\33\2\u01e8\u01e9\7)\2\2\u01e9\u01eb\3\2\2")
+ buf.write("\2\u01ea\u01de\3\2\2\2\u01ea\u01e0\3\2\2\2\u01ea\u01e4")
+ buf.write("\3\2\2\2\u01ea\u01e6\3\2\2\2\u01ebC\3\2\2\2\u01ec\u01f5")
+ buf.write("\5`\61\2\u01ed\u01ee\7\3\2\2\u01ee\u01f0\5F$\2\u01ef\u01f1")
+ buf.write("\7\6\2\2\u01f0\u01ef\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1")
+ buf.write("\u01f2\3\2\2\2\u01f2\u01f3\7\26\2\2\u01f3\u01f5\3\2\2")
+ buf.write("\2\u01f4\u01ec\3\2\2\2\u01f4\u01ed\3\2\2\2\u01f5E\3\2")
+ buf.write("\2\2\u01f6\u01fb\5D#\2\u01f7\u01f8\7\6\2\2\u01f8\u01fa")
+ buf.write("\5D#\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd\3\2\2\2\u01fb\u01f9")
+ buf.write("\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fcG\3\2\2\2\u01fd\u01fb")
+ buf.write("\3\2\2\2\u01fe\u0200\5`\61\2\u01ff\u0201\7\37\2\2\u0200")
+ buf.write("\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0209\3\2\2\2")
+ buf.write("\u0202\u0203\7\6\2\2\u0203\u0205\5`\61\2\u0204\u0206\7")
+ buf.write("\37\2\2\u0205\u0204\3\2\2\2\u0205\u0206\3\2\2\2\u0206")
+ buf.write("\u0208\3\2\2\2\u0207\u0202\3\2\2\2\u0208\u020b\3\2\2\2")
+ buf.write("\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020aI\3\2\2")
+ buf.write("\2\u020b\u0209\3\2\2\2\u020c\u0213\5L\'\2\u020d\u020e")
+ buf.write("\7.\2\2\u020e\u0212\5L\'\2\u020f\u0210\7/\2\2\u0210\u0212")
+ buf.write("\5L\'\2\u0211\u020d\3\2\2\2\u0211\u020f\3\2\2\2\u0212")
+ buf.write("\u0215\3\2\2\2\u0213\u0211\3\2\2\2\u0213\u0214\3\2\2\2")
+ buf.write("\u0214K\3\2\2\2\u0215\u0213\3\2\2\2\u0216\u021f\5N(\2")
+ buf.write("\u0217\u0218\7,\2\2\u0218\u021e\5N(\2\u0219\u021a\7\60")
+ buf.write("\2\2\u021a\u021e\5N(\2\u021b\u021c\7\61\2\2\u021c\u021e")
+ buf.write("\5N(\2\u021d\u0217\3\2\2\2\u021d\u0219\3\2\2\2\u021d\u021b")
+ buf.write("\3\2\2\2\u021e\u0221\3\2\2\2\u021f\u021d\3\2\2\2\u021f")
+ buf.write("\u0220\3\2\2\2\u0220M\3\2\2\2\u0221\u021f\3\2\2\2\u0222")
+ buf.write("\u0223\7(\2\2\u0223\u0224\5<\37\2\u0224\u0225\7)\2\2\u0225")
+ buf.write("\u0226\5N(\2\u0226\u0229\3\2\2\2\u0227\u0229\5P)\2\u0228")
+ buf.write("\u0222\3\2\2\2\u0228\u0227\3\2\2\2\u0229O\3\2\2\2\u022a")
+ buf.write("\u023a\5R*\2\u022b\u022c\7\62\2\2\u022c\u023a\5P)\2\u022d")
+ buf.write("\u022e\7\63\2\2\u022e\u023a\5P)\2\u022f\u0230\5V,\2\u0230")
+ buf.write("\u0231\5N(\2\u0231\u023a\3\2\2\2\u0232\u0233\7\64\2\2")
+ buf.write("\u0233\u023a\5P)\2\u0234\u0235\7\64\2\2\u0235\u0236\7")
+ buf.write("(\2\2\u0236\u0237\5<\37\2\u0237\u0238\7)\2\2\u0238\u023a")
+ buf.write("\3\2\2\2\u0239\u022a\3\2\2\2\u0239\u022b\3\2\2\2\u0239")
+ buf.write("\u022d\3\2\2\2\u0239\u022f\3\2\2\2\u0239\u0232\3\2\2\2")
+ buf.write("\u0239\u0234\3\2\2\2\u023aQ\3\2\2\2\u023b\u023c\5X-\2")
+ buf.write("\u023c\u025a\b*\1\2\u023d\u023e\7*\2\2\u023e\u023f\5\\")
+ buf.write("/\2\u023f\u0240\7+\2\2\u0240\u0259\3\2\2\2\u0241\u0242")
+ buf.write("\7(\2\2\u0242\u0243\7)\2\2\u0243\u0259\b*\1\2\u0244\u0245")
+ buf.write("\7(\2\2\u0245\u0246\5H%\2\u0246\u0247\7)\2\2\u0247\u0248")
+ buf.write("\b*\1\2\u0248\u0259\3\2\2\2\u0249\u024a\7(\2\2\u024a\u024b")
+ buf.write("\5T+\2\u024b\u024c\7)\2\2\u024c\u0259\3\2\2\2\u024d\u024e")
+ buf.write("\7\65\2\2\u024e\u024f\7_\2\2\u024f\u0259\b*\1\2\u0250")
+ buf.write("\u0251\7,\2\2\u0251\u0252\7_\2\2\u0252\u0259\b*\1\2\u0253")
+ buf.write("\u0254\7\66\2\2\u0254\u0255\7_\2\2\u0255\u0259\b*\1\2")
+ buf.write("\u0256\u0259\7\62\2\2\u0257\u0259\7\63\2\2\u0258\u023d")
+ buf.write("\3\2\2\2\u0258\u0241\3\2\2\2\u0258\u0244\3\2\2\2\u0258")
+ buf.write("\u0249\3\2\2\2\u0258\u024d\3\2\2\2\u0258\u0250\3\2\2\2")
+ buf.write("\u0258\u0253\3\2\2\2\u0258\u0256\3\2\2\2\u0258\u0257\3")
+ buf.write("\2\2\2\u0259\u025c\3\2\2\2\u025a\u0258\3\2\2\2\u025a\u025b")
+ buf.write("\3\2\2\2\u025bS\3\2\2\2\u025c\u025a\3\2\2\2\u025d\u0262")
+ buf.write("\58\35\2\u025e\u025f\7\6\2\2\u025f\u0261\58\35\2\u0260")
+ buf.write("\u025e\3\2\2\2\u0261\u0264\3\2\2\2\u0262\u0260\3\2\2\2")
+ buf.write("\u0262\u0263\3\2\2\2\u0263U\3\2\2\2\u0264\u0262\3\2\2")
+ buf.write("\2\u0265\u0266\t\5\2\2\u0266W\3\2\2\2\u0267\u026e\7_\2")
+ buf.write("\2\u0268\u026e\5Z.\2\u0269\u026a\7(\2\2\u026a\u026b\5")
+ buf.write("\\/\2\u026b\u026c\7)\2\2\u026c\u026e\3\2\2\2\u026d\u0267")
+ buf.write("\3\2\2\2\u026d\u0268\3\2\2\2\u026d\u0269\3\2\2\2\u026e")
+ buf.write("Y\3\2\2\2\u026f\u028a\7b\2\2\u0270\u028a\7d\2\2\u0271")
+ buf.write("\u028a\7c\2\2\u0272\u028a\7`\2\2\u0273\u0275\7_\2\2\u0274")
+ buf.write("\u0273\3\2\2\2\u0275\u0278\3\2\2\2\u0276\u0274\3\2\2\2")
+ buf.write("\u0276\u0277\3\2\2\2\u0277\u027a\3\2\2\2\u0278\u0276\3")
+ buf.write("\2\2\2\u0279\u027b\7a\2\2\u027a\u0279\3\2\2\2\u027b\u027c")
+ buf.write("\3\2\2\2\u027c\u027a\3\2\2\2\u027c\u027d\3\2\2\2\u027d")
+ buf.write("\u027f\3\2\2\2\u027e\u0276\3\2\2\2\u027f\u0280\3\2\2\2")
+ buf.write("\u0280\u027e\3\2\2\2\u0280\u0281\3\2\2\2\u0281\u0285\3")
+ buf.write("\2\2\2\u0282\u0284\7_\2\2\u0283\u0282\3\2\2\2\u0284\u0287")
+ buf.write("\3\2\2\2\u0285\u0283\3\2\2\2\u0285\u0286\3\2\2\2\u0286")
+ buf.write("\u028a\3\2\2\2\u0287\u0285\3\2\2\2\u0288\u028a\7e\2\2")
+ buf.write("\u0289\u026f\3\2\2\2\u0289\u0270\3\2\2\2\u0289\u0271\3")
+ buf.write("\2\2\2\u0289\u0272\3\2\2\2\u0289\u027e\3\2\2\2\u0289\u0288")
+ buf.write("\3\2\2\2\u028a[\3\2\2\2\u028b\u0290\5`\61\2\u028c\u028d")
+ buf.write("\7\6\2\2\u028d\u028f\5`\61\2\u028e\u028c\3\2\2\2\u028f")
+ buf.write("\u0292\3\2\2\2\u0290\u028e\3\2\2\2\u0290\u0291\3\2\2\2")
+ buf.write("\u0291]\3\2\2\2\u0292\u0290\3\2\2\2\u0293\u0294\5f\64")
+ buf.write("\2\u0294_\3\2\2\2\u0295\u0296\5b\62\2\u0296\u0297\5d\63")
+ buf.write("\2\u0297\u0298\5`\61\2\u0298\u029b\3\2\2\2\u0299\u029b")
+ buf.write("\5f\64\2\u029a\u0295\3\2\2\2\u029a\u0299\3\2\2\2\u029b")
+ buf.write("a\3\2\2\2\u029c\u029d\5P)\2\u029dc\3\2\2\2\u029e\u029f")
+ buf.write("\t\6\2\2\u029fe\3\2\2\2\u02a0\u02a7\5h\65\2\u02a1\u02a2")
+ buf.write("\7D\2\2\u02a2\u02a3\5\\/\2\u02a3\u02a4\7\31\2\2\u02a4")
+ buf.write("\u02a5\5f\64\2\u02a5\u02a6\b\64\1\2\u02a6\u02a8\3\2\2")
+ buf.write("\2\u02a7\u02a1\3\2\2\2\u02a7\u02a8\3\2\2\2\u02a8g\3\2")
+ buf.write("\2\2\u02a9\u02ae\5j\66\2\u02aa\u02ab\7E\2\2\u02ab\u02ad")
+ buf.write("\5j\66\2\u02ac\u02aa\3\2\2\2\u02ad\u02b0\3\2\2\2\u02ae")
+ buf.write("\u02ac\3\2\2\2\u02ae\u02af\3\2\2\2\u02afi\3\2\2\2\u02b0")
+ buf.write("\u02ae\3\2\2\2\u02b1\u02b6\5l\67\2\u02b2\u02b3\7F\2\2")
+ buf.write("\u02b3\u02b5\5l\67\2\u02b4\u02b2\3\2\2\2\u02b5\u02b8\3")
+ buf.write("\2\2\2\u02b6\u02b4\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7k")
+ buf.write("\3\2\2\2\u02b8\u02b6\3\2\2\2\u02b9\u02be\5n8\2\u02ba\u02bb")
+ buf.write("\7G\2\2\u02bb\u02bd\5n8\2\u02bc\u02ba\3\2\2\2\u02bd\u02c0")
+ buf.write("\3\2\2\2\u02be\u02bc\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf")
+ buf.write("m\3\2\2\2\u02c0\u02be\3\2\2\2\u02c1\u02c6\5p9\2\u02c2")
+ buf.write("\u02c3\7H\2\2\u02c3\u02c5\5p9\2\u02c4\u02c2\3\2\2\2\u02c5")
+ buf.write("\u02c8\3\2\2\2\u02c6\u02c4\3\2\2\2\u02c6\u02c7\3\2\2\2")
+ buf.write("\u02c7o\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c9\u02ce\5r:\2")
+ buf.write("\u02ca\u02cb\7\67\2\2\u02cb\u02cd\5r:\2\u02cc\u02ca\3")
+ buf.write("\2\2\2\u02cd\u02d0\3\2\2\2\u02ce\u02cc\3\2\2\2\u02ce\u02cf")
+ buf.write("\3\2\2\2\u02cfq\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d6")
+ buf.write("\5t;\2\u02d2\u02d3\t\7\2\2\u02d3\u02d5\5t;\2\u02d4\u02d2")
+ buf.write("\3\2\2\2\u02d5\u02d8\3\2\2\2\u02d6\u02d4\3\2\2\2\u02d6")
+ buf.write("\u02d7\3\2\2\2\u02d7s\3\2\2\2\u02d8\u02d6\3\2\2\2\u02d9")
+ buf.write("\u02de\5v<\2\u02da\u02db\t\b\2\2\u02db\u02dd\5v<\2\u02dc")
+ buf.write("\u02da\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2")
+ buf.write("\u02de\u02df\3\2\2\2\u02dfu\3\2\2\2\u02e0\u02de\3\2\2")
+ buf.write("\2\u02e1\u02e6\5J&\2\u02e2\u02e3\t\t\2\2\u02e3\u02e5\5")
+ buf.write("J&\2\u02e4\u02e2\3\2\2\2\u02e5\u02e8\3\2\2\2\u02e6\u02e4")
+ buf.write("\3\2\2\2\u02e6\u02e7\3\2\2\2\u02e7w\3\2\2\2\u02e8\u02e6")
+ buf.write("\3\2\2\2\u02e9\u02f5\5\u0082B\2\u02ea\u02f5\5\u0084C\2")
+ buf.write("\u02eb\u02f5\5\u0088E\2\u02ec\u02f5\5\u008aF\2\u02ed\u02f5")
+ buf.write("\5\u008cG\2\u02ee\u02f5\5\u008eH\2\u02ef\u02f5\5\u0080")
+ buf.write("A\2\u02f0\u02f5\5z>\2\u02f1\u02f5\5|?\2\u02f2\u02f5\5")
+ buf.write("~@\2\u02f3\u02f5\5\n\6\2\u02f4\u02e9\3\2\2\2\u02f4\u02ea")
+ buf.write("\3\2\2\2\u02f4\u02eb\3\2\2\2\u02f4\u02ec\3\2\2\2\u02f4")
+ buf.write("\u02ed\3\2\2\2\u02f4\u02ee\3\2\2\2\u02f4\u02ef\3\2\2\2")
+ buf.write("\u02f4\u02f0\3\2\2\2\u02f4\u02f1\3\2\2\2\u02f4\u02f2\3")
+ buf.write("\2\2\2\u02f4\u02f3\3\2\2\2\u02f5y\3\2\2\2\u02f6\u02f8")
+ buf.write("\7Q\2\2\u02f7\u02f6\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8")
+ buf.write("\u02f9\3\2\2\2\u02f9\u02fa\7_\2\2\u02fa\u02fe\7(\2\2\u02fb")
+ buf.write("\u02fd\n\n\2\2\u02fc\u02fb\3\2\2\2\u02fd\u0300\3\2\2\2")
+ buf.write("\u02fe\u02fc\3\2\2\2\u02fe\u02ff\3\2\2\2\u02ff\u0301\3")
+ buf.write("\2\2\2\u0300\u02fe\3\2\2\2\u0301\u0302\7)\2\2\u0302\u0303")
+ buf.write("\7\4\2\2\u0303{\3\2\2\2\u0304\u0305\7R\2\2\u0305\u0309")
+ buf.write("\7\3\2\2\u0306\u0308\n\13\2\2\u0307\u0306\3\2\2\2\u0308")
+ buf.write("\u030b\3\2\2\2\u0309\u0307\3\2\2\2\u0309\u030a\3\2\2\2")
+ buf.write("\u030a\u030c\3\2\2\2\u030b\u0309\3\2\2\2\u030c\u030d\7")
+ buf.write("\26\2\2\u030d}\3\2\2\2\u030e\u030f\7S\2\2\u030f\u0313")
+ buf.write("\7\3\2\2\u0310\u0312\n\13\2\2\u0311\u0310\3\2\2\2\u0312")
+ buf.write("\u0315\3\2\2\2\u0313\u0311\3\2\2\2\u0313\u0314\3\2\2\2")
+ buf.write("\u0314\u0316\3\2\2\2\u0315\u0313\3\2\2\2\u0316\u0317\7")
+ buf.write("\26\2\2\u0317\177\3\2\2\2\u0318\u0319\7_\2\2\u0319\u031d")
+ buf.write("\7(\2\2\u031a\u031c\5\n\6\2\u031b\u031a\3\2\2\2\u031c")
+ buf.write("\u031f\3\2\2\2\u031d\u031b\3\2\2\2\u031d\u031e\3\2\2\2")
+ buf.write("\u031e\u0321\3\2\2\2\u031f\u031d\3\2\2\2\u0320\u0322\5")
+ buf.write("\u0086D\2\u0321\u0320\3\2\2\2\u0321\u0322\3\2\2\2\u0322")
+ buf.write("\u0324\3\2\2\2\u0323\u0325\5\\/\2\u0324\u0323\3\2\2\2")
+ buf.write("\u0324\u0325\3\2\2\2\u0325\u0326\3\2\2\2\u0326\u0327\7")
+ buf.write(")\2\2\u0327\u0081\3\2\2\2\u0328\u0329\7_\2\2\u0329\u032a")
+ buf.write("\7\31\2\2\u032a\u0334\5x=\2\u032b\u032c\7T\2\2\u032c\u032d")
+ buf.write("\5^\60\2\u032d\u032e\7\31\2\2\u032e\u032f\5x=\2\u032f")
+ buf.write("\u0334\3\2\2\2\u0330\u0331\7U\2\2\u0331\u0332\7\31\2\2")
+ buf.write("\u0332\u0334\5x=\2\u0333\u0328\3\2\2\2\u0333\u032b\3\2")
+ buf.write("\2\2\u0333\u0330\3\2\2\2\u0334\u0083\3\2\2\2\u0335\u0339")
+ buf.write("\7\3\2\2\u0336\u0338\5\n\6\2\u0337\u0336\3\2\2\2\u0338")
+ buf.write("\u033b\3\2\2\2\u0339\u0337\3\2\2\2\u0339\u033a\3\2\2\2")
+ buf.write("\u033a\u033d\3\2\2\2\u033b\u0339\3\2\2\2\u033c\u033e\5")
+ buf.write("\u0086D\2\u033d\u033c\3\2\2\2\u033d\u033e\3\2\2\2\u033e")
+ buf.write("\u033f\3\2\2\2\u033f\u0340\7\26\2\2\u0340\u0085\3\2\2")
+ buf.write("\2\u0341\u0343\5x=\2\u0342\u0341\3\2\2\2\u0343\u0344\3")
+ buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0344\u0345\3\2\2\2\u0345\u0087")
+ buf.write("\3\2\2\2\u0346\u034b\7\4\2\2\u0347\u0348\5\\/\2\u0348")
+ buf.write("\u0349\7\4\2\2\u0349\u034b\3\2\2\2\u034a\u0346\3\2\2\2")
+ buf.write("\u034a\u0347\3\2\2\2\u034b\u0089\3\2\2\2\u034c\u034d\7")
+ buf.write("V\2\2\u034d\u034e\7(\2\2\u034e\u034f\5\\/\2\u034f\u0350")
+ buf.write("\7)\2\2\u0350\u0351\bF\1\2\u0351\u0354\5x=\2\u0352\u0353")
+ buf.write("\7W\2\2\u0353\u0355\5x=\2\u0354\u0352\3\2\2\2\u0354\u0355")
+ buf.write("\3\2\2\2\u0355\u035d\3\2\2\2\u0356\u0357\7X\2\2\u0357")
+ buf.write("\u0358\7(\2\2\u0358\u0359\5\\/\2\u0359\u035a\7)\2\2\u035a")
+ buf.write("\u035b\5x=\2\u035b\u035d\3\2\2\2\u035c\u034c\3\2\2\2\u035c")
+ buf.write("\u0356\3\2\2\2\u035d\u008b\3\2\2\2\u035e\u035f\7Y\2\2")
+ buf.write("\u035f\u0360\7(\2\2\u0360\u0361\5\\/\2\u0361\u0362\7)")
+ buf.write("\2\2\u0362\u0363\5x=\2\u0363\u0364\bG\1\2\u0364\u036f")
+ buf.write("\3\2\2\2\u0365\u0366\7Z\2\2\u0366\u0367\5x=\2\u0367\u0368")
+ buf.write("\7Y\2\2\u0368\u0369\7(\2\2\u0369\u036a\5\\/\2\u036a\u036b")
+ buf.write("\7)\2\2\u036b\u036c\7\4\2\2\u036c\u036d\bG\1\2\u036d\u036f")
+ buf.write("\3\2\2\2\u036e\u035e\3\2\2\2\u036e\u0365\3\2\2\2\u036f")
+ buf.write("\u008d\3\2\2\2\u0370\u0371\7[\2\2\u0371\u0372\7_\2\2\u0372")
+ buf.write("\u037e\7\4\2\2\u0373\u0374\7\\\2\2\u0374\u037e\7\4\2\2")
+ buf.write("\u0375\u0376\7]\2\2\u0376\u037e\7\4\2\2\u0377\u0378\7")
+ buf.write("^\2\2\u0378\u037e\7\4\2\2\u0379\u037a\7^\2\2\u037a\u037b")
+ buf.write("\5\\/\2\u037b\u037c\7\4\2\2\u037c\u037e\3\2\2\2\u037d")
+ buf.write("\u0370\3\2\2\2\u037d\u0373\3\2\2\2\u037d\u0375\3\2\2\2")
+ buf.write("\u037d\u0377\3\2\2\2\u037d\u0379\3\2\2\2\u037e\u008f\3")
+ buf.write("\2\2\2o\u0093\u0097\u009d\u00a6\u00a8\u00ab\u00b1\u00b6")
+ buf.write("\u00bd\u00bf\u00c3\u00cb\u00d0\u00d7\u00dd\u00f4\u00f9")
+ buf.write("\u00ff\u0108\u010f\u0117\u0119\u0120\u0126\u012a\u0130")
+ buf.write("\u0139\u013f\u0146\u014c\u0151\u0154\u0157\u015a\u015e")
+ buf.write("\u0164\u0169\u0170\u0172\u0184\u018a\u018d\u0192\u0197")
+ buf.write("\u019a\u019f\u01a4\u01aa\u01ac\u01b0\u01b5\u01b9\u01c0")
+ buf.write("\u01c5\u01c8\u01cc\u01cf\u01d6\u01db\u01ea\u01f0\u01f4")
+ buf.write("\u01fb\u0200\u0205\u0209\u0211\u0213\u021d\u021f\u0228")
+ buf.write("\u0239\u0258\u025a\u0262\u026d\u0276\u027c\u0280\u0285")
+ buf.write("\u0289\u0290\u029a\u02a7\u02ae\u02b6\u02be\u02c6\u02ce")
+ buf.write("\u02d6\u02de\u02e6\u02f4\u02f7\u02fe\u0309\u0313\u031d")
+ buf.write("\u0321\u0324\u0333\u0339\u033d\u0344\u034a\u0354\u035c")
+ buf.write("\u036e\u037d")
+ return buf.getvalue()
+
+
+class CParser ( Parser ):
+
+ grammarFileName = "C.g4"
+
+ atn = ATNDeserializer().deserialize(serializedATN())
+
+ decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+
+ sharedContextCache = PredictionContextCache()
+
+ literalNames = [ "<INVALID>", "'{'", "';'", "'typedef'", "','", "'='",
+ "'extern'", "'static'", "'auto'", "'register'", "'STATIC'",
+ "'void'", "'char'", "'short'", "'int'", "'long'", "'float'",
+ "'double'", "'signed'", "'unsigned'", "'}'", "'struct'",
+ "'union'", "':'", "'enum'", "'const'", "'volatile'",
+ "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'",
+ "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'",
+ "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
+ "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'",
+ "'-'", "'/'", "'%'", "'++'", "'--'", "'sizeof'", "'.'",
+ "'->'", "'&'", "'~'", "'!'", "'*='", "'/='", "'%='",
+ "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
+ "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='",
+ "'<'", "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'",
+ "'_asm'", "'__asm'", "'case'", "'default'", "'if'",
+ "'else'", "'switch'", "'while'", "'do'", "'goto'",
+ "'continue'", "'break'", "'return'" ]
+
+ symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "<INVALID>", "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL",
+ "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL",
+ "FLOATING_POINT_LITERAL", "WS", "BS", "UnicodeVocabulary",
+ "COMMENT", "LINE_COMMENT", "LINE_COMMAND" ]
+
+ RULE_translation_unit = 0
+ RULE_external_declaration = 1
+ RULE_function_definition = 2
+ RULE_declaration_specifiers = 3
+ RULE_declaration = 4
+ RULE_init_declarator_list = 5
+ RULE_init_declarator = 6
+ RULE_storage_class_specifier = 7
+ RULE_type_specifier = 8
+ RULE_type_id = 9
+ RULE_struct_or_union_specifier = 10
+ RULE_struct_or_union = 11
+ RULE_struct_declaration_list = 12
+ RULE_struct_declaration = 13
+ RULE_specifier_qualifier_list = 14
+ RULE_struct_declarator_list = 15
+ RULE_struct_declarator = 16
+ RULE_enum_specifier = 17
+ RULE_enumerator_list = 18
+ RULE_enumerator = 19
+ RULE_type_qualifier = 20
+ RULE_declarator = 21
+ RULE_direct_declarator = 22
+ RULE_declarator_suffix = 23
+ RULE_pointer = 24
+ RULE_parameter_type_list = 25
+ RULE_parameter_list = 26
+ RULE_parameter_declaration = 27
+ RULE_identifier_list = 28
+ RULE_type_name = 29
+ RULE_abstract_declarator = 30
+ RULE_direct_abstract_declarator = 31
+ RULE_abstract_declarator_suffix = 32
+ RULE_initializer = 33
+ RULE_initializer_list = 34
+ RULE_argument_expression_list = 35
+ RULE_additive_expression = 36
+ RULE_multiplicative_expression = 37
+ RULE_cast_expression = 38
+ RULE_unary_expression = 39
+ RULE_postfix_expression = 40
+ RULE_macro_parameter_list = 41
+ RULE_unary_operator = 42
+ RULE_primary_expression = 43
+ RULE_constant = 44
+ RULE_expression = 45
+ RULE_constant_expression = 46
+ RULE_assignment_expression = 47
+ RULE_lvalue = 48
+ RULE_assignment_operator = 49
+ RULE_conditional_expression = 50
+ RULE_logical_or_expression = 51
+ RULE_logical_and_expression = 52
+ RULE_inclusive_or_expression = 53
+ RULE_exclusive_or_expression = 54
+ RULE_and_expression = 55
+ RULE_equality_expression = 56
+ RULE_relational_expression = 57
+ RULE_shift_expression = 58
+ RULE_statement = 59
+ RULE_asm2_statement = 60
+ RULE_asm1_statement = 61
+ RULE_asm_statement = 62
+ RULE_macro_statement = 63
+ RULE_labeled_statement = 64
+ RULE_compound_statement = 65
+ RULE_statement_list = 66
+ RULE_expression_statement = 67
+ RULE_selection_statement = 68
+ RULE_iteration_statement = 69
+ RULE_jump_statement = 70
+
+ ruleNames = [ "translation_unit", "external_declaration", "function_definition",
+ "declaration_specifiers", "declaration", "init_declarator_list",
+ "init_declarator", "storage_class_specifier", "type_specifier",
+ "type_id", "struct_or_union_specifier", "struct_or_union",
+ "struct_declaration_list", "struct_declaration", "specifier_qualifier_list",
+ "struct_declarator_list", "struct_declarator", "enum_specifier",
+ "enumerator_list", "enumerator", "type_qualifier", "declarator",
+ "direct_declarator", "declarator_suffix", "pointer",
+ "parameter_type_list", "parameter_list", "parameter_declaration",
+ "identifier_list", "type_name", "abstract_declarator",
+ "direct_abstract_declarator", "abstract_declarator_suffix",
+ "initializer", "initializer_list", "argument_expression_list",
+ "additive_expression", "multiplicative_expression", "cast_expression",
+ "unary_expression", "postfix_expression", "macro_parameter_list",
+ "unary_operator", "primary_expression", "constant", "expression",
+ "constant_expression", "assignment_expression", "lvalue",
+ "assignment_operator", "conditional_expression", "logical_or_expression",
+ "logical_and_expression", "inclusive_or_expression",
+ "exclusive_or_expression", "and_expression", "equality_expression",
+ "relational_expression", "shift_expression", "statement",
+ "asm2_statement", "asm1_statement", "asm_statement",
+ "macro_statement", "labeled_statement", "compound_statement",
+ "statement_list", "expression_statement", "selection_statement",
+ "iteration_statement", "jump_statement" ]
+
+ EOF = Token.EOF
+ T__0=1
+ T__1=2
+ T__2=3
+ T__3=4
+ T__4=5
+ T__5=6
+ T__6=7
+ T__7=8
+ T__8=9
+ T__9=10
+ T__10=11
+ T__11=12
+ T__12=13
+ T__13=14
+ T__14=15
+ T__15=16
+ T__16=17
+ T__17=18
+ T__18=19
+ T__19=20
+ T__20=21
+ T__21=22
+ T__22=23
+ T__23=24
+ T__24=25
+ T__25=26
+ T__26=27
+ T__27=28
+ T__28=29
+ T__29=30
+ T__30=31
+ T__31=32
+ T__32=33
+ T__33=34
+ T__34=35
+ T__35=36
+ T__36=37
+ T__37=38
+ T__38=39
+ T__39=40
+ T__40=41
+ T__41=42
+ T__42=43
+ T__43=44
+ T__44=45
+ T__45=46
+ T__46=47
+ T__47=48
+ T__48=49
+ T__49=50
+ T__50=51
+ T__51=52
+ T__52=53
+ T__53=54
+ T__54=55
+ T__55=56
+ T__56=57
+ T__57=58
+ T__58=59
+ T__59=60
+ T__60=61
+ T__61=62
+ T__62=63
+ T__63=64
+ T__64=65
+ T__65=66
+ T__66=67
+ T__67=68
+ T__68=69
+ T__69=70
+ T__70=71
+ T__71=72
+ T__72=73
+ T__73=74
+ T__74=75
+ T__75=76
+ T__76=77
+ T__77=78
+ T__78=79
+ T__79=80
+ T__80=81
+ T__81=82
+ T__82=83
+ T__83=84
+ T__84=85
+ T__85=86
+ T__86=87
+ T__87=88
+ T__88=89
+ T__89=90
+ T__90=91
+ T__91=92
+ IDENTIFIER=93
+ CHARACTER_LITERAL=94
+ STRING_LITERAL=95
+ HEX_LITERAL=96
+ DECIMAL_LITERAL=97
+ OCTAL_LITERAL=98
+ FLOATING_POINT_LITERAL=99
+ WS=100
+ BS=101
+ UnicodeVocabulary=102
+ COMMENT=103
+ LINE_COMMENT=104
+ LINE_COMMAND=105
+
+ # @param input Type: TokenStream
+ # @param output= sys.stdout Type: TextIO
+ def __init__(self,input,output= sys.stdout):
+ super().__init__(input, output)
+ self.checkVersion("4.7.1")
+ self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
+ self._predicates = None
+
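+    # Illustrative usage sketch (an assumption, not part of the generated file):
+    # driving this parser over preprocessed C text, assuming the companion CLexer
+    # generated from the same C.g4 grammar is importable alongside this module:
+    #
+    #   from antlr4 import CommonTokenStream, InputStream
+    #   lexer = CLexer(InputStream(source_text))   # source_text: the C code to parse
+    #   parser = CParser(CommonTokenStream(lexer))
+    #   tree = parser.translation_unit()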
+
+
+
+ def printTokenInfo(self,line,offset,tokenText):
+ print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
+
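+    # Records a predicate expression (e.g. an if/while condition) and its source span in FileProfile.PredicateExpressionList.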
+ def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.PredicateExpressionList.append(PredExp)
+
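+    # Records an enumeration definition and its source span in FileProfile.EnumerationDefinitionList.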
+ def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.EnumerationDefinitionList.append(EnumDef)
+
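+    # Records a struct or union definition and its source span in FileProfile.StructUnionDefinitionList.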
+ def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
+ SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.StructUnionDefinitionList.append(SUDef)
+
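+    # Records a typedef (original type text and declared name) and its source span in FileProfile.TypedefDefinitionList.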
+ def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
+ Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.TypedefDefinitionList.append(Tdef)
+
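+    # Records a function definition: its modifier and declarator text, overall span, left-brace position, and declarator position.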
+ def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
+ FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+ FileProfile.FunctionDefinitionList.append(FuncDef)
+
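+    # Records a variable declaration (modifier and declarator text) and its source span in FileProfile.VariableDeclarationList.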
+ def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
+ VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.VariableDeclarationList.append(VarDecl)
+
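+    # Records a function call (callee name and argument-list text) and its source span in FileProfile.FunctionCallingList.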
+ def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
+ FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+ FileProfile.FunctionCallingList.append(FuncCall)
+
+
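+    # Illustrative sketch (an assumption, not part of the generated file): the
+    # enterRule/exitRule hooks below dispatch with hasattr(), so a listener only
+    # needs to define the callbacks it cares about, e.g.:
+    #
+    #   class FunctionDefListener:
+    #       def enterFunction_definition(self, ctx):
+    #           print(ctx.start.line, ctx.start.column)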
+
+ class Translation_unitContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def external_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.External_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.External_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_translation_unit
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterTranslation_unit" ):
+ listener.enterTranslation_unit(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitTranslation_unit" ):
+ listener.exitTranslation_unit(self)
+
+
+
+
+ def translation_unit(self):
+
+ localctx = CParser.Translation_unitContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 0, self.RULE_translation_unit)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 145
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41))) != 0) or _la==CParser.IDENTIFIER:
+ self.state = 142
+ self.external_declaration()
+ self.state = 147
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class External_declarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def function_definition(self):
+ return self.getTypedRuleContext(CParser.Function_definitionContext,0)
+
+
+ def macro_statement(self):
+ return self.getTypedRuleContext(CParser.Macro_statementContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_external_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExternal_declaration" ):
+ listener.enterExternal_declaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExternal_declaration" ):
+ listener.exitExternal_declaration(self)
+
+
+
+
+ def external_declaration(self):
+
+ localctx = CParser.External_declarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 2, self.RULE_external_declaration)
+ self._la = 0 # Token type
+ try:
+ self.state = 166
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 149
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
+ if la_ == 1:
+ self.state = 148
+ self.declaration_specifiers()
+
+
+ self.state = 151
+ self.declarator()
+ self.state = 155
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER:
+ self.state = 152
+ self.declaration()
+ self.state = 157
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 158
+ self.match(CParser.T__0)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 160
+ self.function_definition()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 161
+ self.declaration()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 162
+ self.macro_statement()
+ self.state = 164
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__1:
+ self.state = 163
+ self.match(CParser.T__1)
+
+
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Function_definitionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.ModifierText = ''
+ self.DeclText = ''
+ self.LBLine = 0
+ self.LBOffset = 0
+ self.DeclLine = 0
+ self.DeclOffset = 0
+ self.d = None # Declaration_specifiersContext
+ self._declaration_specifiers = None # Declaration_specifiersContext
+ self._declarator = None # DeclaratorContext
+ self.a = None # Compound_statementContext
+ self.b = None # Compound_statementContext
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def compound_statement(self):
+ return self.getTypedRuleContext(CParser.Compound_statementContext,0)
+
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_function_definition
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterFunction_definition" ):
+ listener.enterFunction_definition(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitFunction_definition" ):
+ listener.exitFunction_definition(self)
+
+
+
+
+ def function_definition(self):
+
+ localctx = CParser.Function_definitionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 4, self.RULE_function_definition)
+
+        ModifierText = ''
+        DeclText = ''
+        LBLine = 0
+        LBOffset = 0
+        DeclLine = 0
+        DeclOffset = 0
+
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 169
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,5,self._ctx)
+ if la_ == 1:
+ self.state = 168
+ localctx.d = localctx._declaration_specifiers = self.declaration_specifiers()
+
+
+ self.state = 171
+ localctx._declarator = self.declarator()
+ self.state = 180
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__2, CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
+ self.state = 173
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while True:
+ self.state = 172
+ self.declaration()
+ self.state = 175
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
+ break
+
+ self.state = 177
+ localctx.a = self.compound_statement()
+ pass
+ elif token in [CParser.T__0]:
+ self.state = 179
+ localctx.b = self.compound_statement()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+
+        if localctx.d is not None:
+ ModifierText = (None if localctx._declaration_specifiers is None else self._input.getText((localctx._declaration_specifiers.start,localctx._declaration_specifiers.stop)))
+ else:
+ ModifierText = ''
+ DeclText = (None if localctx._declarator is None else self._input.getText((localctx._declarator.start,localctx._declarator.stop)))
+ DeclLine = (None if localctx._declarator is None else localctx._declarator.start).line
+ DeclOffset = (None if localctx._declarator is None else localctx._declarator.start).column
+        if localctx.a is not None:
+ LBLine = (None if localctx.a is None else localctx.a.start).line
+ LBOffset = (None if localctx.a is None else localctx.a.start).column
+ else:
+ LBLine = (None if localctx.b is None else localctx.b.start).line
+ LBOffset = (None if localctx.b is None else localctx.b.start).column
+
+ self._ctx.stop = self._input.LT(-1)
+
+ self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Declaration_specifiersContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def storage_class_specifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Storage_class_specifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Storage_class_specifierContext,i)
+
+
+ # @param i=None Type: int
+ def type_specifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_specifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_specifierContext,i)
+
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declaration_specifiers
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclaration_specifiers" ):
+ listener.enterDeclaration_specifiers(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclaration_specifiers" ):
+ listener.exitDeclaration_specifiers(self)
+
+
+
+
+ def declaration_specifiers(self):
+
+ localctx = CParser.Declaration_specifiersContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 6, self.RULE_declaration_specifiers)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 187
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 187
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9]:
+ self.state = 184
+ self.storage_class_specifier()
+ pass
+ elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
+ self.state = 185
+ self.type_specifier()
+ pass
+ elif token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
+ self.state = 186
+ self.type_qualifier()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 189
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class DeclarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.a = None # Token
+ self.b = None # Declaration_specifiersContext
+ self.c = None # Init_declarator_listContext
+ self.d = None # Token
+ self.s = None # Declaration_specifiersContext
+ self.t = None # Init_declarator_listContext
+ self.e = None # Token
+
+ def init_declarator_list(self):
+ return self.getTypedRuleContext(CParser.Init_declarator_listContext,0)
+
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclaration" ):
+ listener.enterDeclaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclaration" ):
+ listener.exitDeclaration(self)
+
+
+
+
+ def declaration(self):
+
+ localctx = CParser.DeclarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 8, self.RULE_declaration)
+ self._la = 0 # Token type
+ try:
+ self.state = 206
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__2]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 191
+ localctx.a = self.match(CParser.T__2)
+ self.state = 193
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
+ if la_ == 1:
+ self.state = 192
+ localctx.b = self.declaration_specifiers()
+
+
+ self.state = 195
+ localctx.c = self.init_declarator_list()
+ self.state = 196
+ localctx.d = self.match(CParser.T__1)
+
+ if localctx.b is not None:
+ self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, (None if localctx.b is None else self._input.getText((localctx.b.start,localctx.b.stop))), (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
+ else:
+ self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, '', (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
+
+ pass
+ elif token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 199
+ localctx.s = self.declaration_specifiers()
+ self.state = 201
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
+ self.state = 200
+ localctx.t = self.init_declarator_list()
+
+
+ self.state = 203
+ localctx.e = self.match(CParser.T__1)
+
+ if localctx.t is not None:
+ self.StoreVariableDeclaration((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.t is None else localctx.t.start).line, (None if localctx.t is None else localctx.t.start).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))), (None if localctx.t is None else self._input.getText((localctx.t.start,localctx.t.stop))))
+
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Init_declarator_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def init_declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Init_declaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.Init_declaratorContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_init_declarator_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInit_declarator_list" ):
+ listener.enterInit_declarator_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInit_declarator_list" ):
+ listener.exitInit_declarator_list(self)
+
+
+
+
+ def init_declarator_list(self):
+
+ localctx = CParser.Init_declarator_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 10, self.RULE_init_declarator_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 208
+ self.init_declarator()
+ self.state = 213
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 209
+ self.match(CParser.T__3)
+ self.state = 210
+ self.init_declarator()
+ self.state = 215
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Init_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def initializer(self):
+ return self.getTypedRuleContext(CParser.InitializerContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_init_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInit_declarator" ):
+ listener.enterInit_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInit_declarator" ):
+ listener.exitInit_declarator(self)
+
+
+
+
+ def init_declarator(self):
+
+ localctx = CParser.Init_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 12, self.RULE_init_declarator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 216
+ self.declarator()
+ self.state = 219
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__4:
+ self.state = 217
+ self.match(CParser.T__4)
+ self.state = 218
+ self.initializer()
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Storage_class_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_storage_class_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStorage_class_specifier" ):
+ listener.enterStorage_class_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStorage_class_specifier" ):
+ listener.exitStorage_class_specifier(self)
+
+
+
+
+ def storage_class_specifier(self):
+
+ localctx = CParser.Storage_class_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 14, self.RULE_storage_class_specifier)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 221
+ _la = self._input.LA(1)
+ if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.s = None # Struct_or_union_specifierContext
+ self.e = None # Enum_specifierContext
+
+ def struct_or_union_specifier(self):
+ return self.getTypedRuleContext(CParser.Struct_or_union_specifierContext,0)
+
+
+ def enum_specifier(self):
+ return self.getTypedRuleContext(CParser.Enum_specifierContext,0)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ def type_id(self):
+ return self.getTypedRuleContext(CParser.Type_idContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_specifier" ):
+ listener.enterType_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_specifier" ):
+ listener.exitType_specifier(self)
+
+
+
+
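+    # Thirteen alternatives selected by adaptivePredict: nine literal type-keyword tokens
+    # (T__10 .. T__18), a struct/union or enum specifier (each reported to EDK2's source
+    # database via StoreStructUnionDefinition / StoreEnumerationDefinition), an IDENTIFIER
+    # with qualifiers and a declarator, or a plain type_id.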
+ def type_specifier(self):
+
+ localctx = CParser.Type_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 16, self.RULE_type_specifier)
+ try:
+ self.state = 247
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 223
+ self.match(CParser.T__10)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 224
+ self.match(CParser.T__11)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 225
+ self.match(CParser.T__12)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 226
+ self.match(CParser.T__13)
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 227
+ self.match(CParser.T__14)
+ pass
+
+ elif la_ == 6:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 228
+ self.match(CParser.T__15)
+ pass
+
+ elif la_ == 7:
+ self.enterOuterAlt(localctx, 7)
+ self.state = 229
+ self.match(CParser.T__16)
+ pass
+
+ elif la_ == 8:
+ self.enterOuterAlt(localctx, 8)
+ self.state = 230
+ self.match(CParser.T__17)
+ pass
+
+ elif la_ == 9:
+ self.enterOuterAlt(localctx, 9)
+ self.state = 231
+ self.match(CParser.T__18)
+ pass
+
+ elif la_ == 10:
+ self.enterOuterAlt(localctx, 10)
+ self.state = 232
+ localctx.s = self.struct_or_union_specifier()
+
+                if localctx.s.stop is not None:
+                    self.StoreStructUnionDefinition(
+                        localctx.s.start.line, localctx.s.start.column, localctx.s.stop.line,
+                        localctx.s.stop.column, self._input.getText((localctx.s.start, localctx.s.stop)))
+
+ pass
+
+ elif la_ == 11:
+ self.enterOuterAlt(localctx, 11)
+ self.state = 235
+ localctx.e = self.enum_specifier()
+
+                if localctx.e.stop is not None:
+                    self.StoreEnumerationDefinition(
+                        localctx.e.start.line, localctx.e.start.column, localctx.e.stop.line,
+                        localctx.e.stop.column, self._input.getText((localctx.e.start, localctx.e.stop)))
+
+ pass
+
+ elif la_ == 12:
+ self.enterOuterAlt(localctx, 12)
+ self.state = 238
+ self.match(CParser.IDENTIFIER)
+ self.state = 242
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 239
+ self.type_qualifier()
+ self.state = 244
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
+
+ self.state = 245
+ self.declarator()
+ pass
+
+ elif la_ == 13:
+ self.enterOuterAlt(localctx, 13)
+ self.state = 246
+ self.type_id()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_idContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_id
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_id" ):
+ listener.enterType_id(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_id" ):
+ listener.exitType_id(self)
+
+
+
+
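+    # Trivial rule: a type_id is a single IDENTIFIER token.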
+ def type_id(self):
+
+ localctx = CParser.Type_idContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 18, self.RULE_type_id)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 249
+ self.match(CParser.IDENTIFIER)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_or_union_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def struct_or_union(self):
+ return self.getTypedRuleContext(CParser.Struct_or_unionContext,0)
+
+
+ def struct_declaration_list(self):
+ return self.getTypedRuleContext(CParser.Struct_declaration_listContext,0)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_or_union_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_or_union_specifier" ):
+ listener.enterStruct_or_union_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_or_union_specifier" ):
+ listener.exitStruct_or_union_specifier(self)
+
+
+
+
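+    # Two alternatives: struct_or_union IDENTIFIER? T__0 struct_declaration_list T__19
+    # (T__0 and T__19 are evidently '{' and '}'), or a bare struct_or_union IDENTIFIER reference.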
+ def struct_or_union_specifier(self):
+
+ localctx = CParser.Struct_or_union_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 20, self.RULE_struct_or_union_specifier)
+ self._la = 0 # Token type
+ try:
+ self.state = 262
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 251
+ self.struct_or_union()
+ self.state = 253
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.IDENTIFIER:
+ self.state = 252
+ self.match(CParser.IDENTIFIER)
+
+
+ self.state = 255
+ self.match(CParser.T__0)
+ self.state = 256
+ self.struct_declaration_list()
+ self.state = 257
+ self.match(CParser.T__19)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 259
+ self.struct_or_union()
+ self.state = 260
+ self.match(CParser.IDENTIFIER)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_or_unionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_or_union
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_or_union" ):
+ listener.enterStruct_or_union(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_or_union" ):
+ listener.exitStruct_or_union(self)
+
+
+
+
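+    # Matches either T__20 or T__21; given the rule name these are almost certainly 'struct' and 'union'.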
+ def struct_or_union(self):
+
+ localctx = CParser.Struct_or_unionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 22, self.RULE_struct_or_union)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 264
+ _la = self._input.LA(1)
+ if not(_la==CParser.T__20 or _la==CParser.T__21):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declaration_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def struct_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Struct_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.Struct_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declaration_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declaration_list" ):
+ listener.enterStruct_declaration_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declaration_list" ):
+ listener.exitStruct_declaration_list(self)
+
+
+
+
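+    # One or more struct_declaration entries; the do-while loop below repeats until the
+    # lookahead token falls outside the declaration-start set.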
+ def struct_declaration_list(self):
+
+ localctx = CParser.Struct_declaration_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 24, self.RULE_struct_declaration_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 267
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while True:
+ self.state = 266
+ self.struct_declaration()
+ self.state = 269
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & (
+                        (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) |
+                        (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) |
+                        (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) |
+                        (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) |
+                        (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) |
+                        (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) |
+                        (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
+ break
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def specifier_qualifier_list(self):
+ return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)
+
+
+ def struct_declarator_list(self):
+ return self.getTypedRuleContext(CParser.Struct_declarator_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declaration" ):
+ listener.enterStruct_declaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declaration" ):
+ listener.exitStruct_declaration(self)
+
+
+
+
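+    # struct_declaration: specifier_qualifier_list struct_declarator_list T__1 (T__1 is presumably ';').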
+ def struct_declaration(self):
+
+ localctx = CParser.Struct_declarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 26, self.RULE_struct_declaration)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 271
+ self.specifier_qualifier_list()
+ self.state = 272
+ self.struct_declarator_list()
+ self.state = 273
+ self.match(CParser.T__1)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Specifier_qualifier_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ # @param i=None Type: int
+ def type_specifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_specifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_specifierContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_specifier_qualifier_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterSpecifier_qualifier_list" ):
+ listener.enterSpecifier_qualifier_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitSpecifier_qualifier_list" ):
+ listener.exitSpecifier_qualifier_list(self)
+
+
+
+
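+    # One or more of (type_qualifier | type_specifier), dispatched on the two lookahead token sets below.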
+ def specifier_qualifier_list(self):
+
+ localctx = CParser.Specifier_qualifier_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 28, self.RULE_specifier_qualifier_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 277
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 277
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
+ self.state = 275
+ self.type_qualifier()
+ pass
+ elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
+ self.state = 276
+ self.type_specifier()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 279
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,21,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declarator_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def struct_declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Struct_declaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.Struct_declaratorContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declarator_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declarator_list" ):
+ listener.enterStruct_declarator_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declarator_list" ):
+ listener.exitStruct_declarator_list(self)
+
+
+
+
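+    # struct_declarator (T__3 struct_declarator)*; T__3 serves as the ',' separator throughout this parser.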
+ def struct_declarator_list(self):
+
+ localctx = CParser.Struct_declarator_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 30, self.RULE_struct_declarator_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 281
+ self.struct_declarator()
+ self.state = 286
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 282
+ self.match(CParser.T__3)
+ self.state = 283
+ self.struct_declarator()
+ self.state = 288
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Struct_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_struct_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStruct_declarator" ):
+ listener.enterStruct_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStruct_declarator" ):
+ listener.exitStruct_declarator(self)
+
+
+
+
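+    # Either declarator (T__22 constant_expression)? or T__22 constant_expression alone;
+    # T__22 is plausibly ':' introducing a C bit-field width.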
+ def struct_declarator(self):
+
+ localctx = CParser.Struct_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 32, self.RULE_struct_declarator)
+ self._la = 0 # Token type
+ try:
+ self.state = 296
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__37, CParser.T__41, CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 289
+ self.declarator()
+ self.state = 292
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__22:
+ self.state = 290
+ self.match(CParser.T__22)
+ self.state = 291
+ self.constant_expression()
+
+
+ pass
+ elif token in [CParser.T__22]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 294
+ self.match(CParser.T__22)
+ self.state = 295
+ self.constant_expression()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Enum_specifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def enumerator_list(self):
+ return self.getTypedRuleContext(CParser.Enumerator_listContext,0)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_enum_specifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEnum_specifier" ):
+ listener.enterEnum_specifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEnum_specifier" ):
+ listener.exitEnum_specifier(self)
+
+
+
+
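+    # Three alternatives headed by T__23 (likely 'enum'): an anonymous or a named
+    # '{ enumerator_list ,? }' body, or a bare named reference.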
+ def enum_specifier(self):
+
+ localctx = CParser.Enum_specifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 34, self.RULE_enum_specifier)
+ self._la = 0 # Token type
+ try:
+ self.state = 317
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,27,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 298
+ self.match(CParser.T__23)
+ self.state = 299
+ self.match(CParser.T__0)
+ self.state = 300
+ self.enumerator_list()
+ self.state = 302
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 301
+ self.match(CParser.T__3)
+
+
+ self.state = 304
+ self.match(CParser.T__19)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 306
+ self.match(CParser.T__23)
+ self.state = 307
+ self.match(CParser.IDENTIFIER)
+ self.state = 308
+ self.match(CParser.T__0)
+ self.state = 309
+ self.enumerator_list()
+ self.state = 311
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 310
+ self.match(CParser.T__3)
+
+
+ self.state = 313
+ self.match(CParser.T__19)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 315
+ self.match(CParser.T__23)
+ self.state = 316
+ self.match(CParser.IDENTIFIER)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Enumerator_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def enumerator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.EnumeratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.EnumeratorContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_enumerator_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEnumerator_list" ):
+ listener.enterEnumerator_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEnumerator_list" ):
+ listener.exitEnumerator_list(self)
+
+
+
+
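+    # enumerator (T__3 enumerator)*; adaptivePredict stops the loop so that a trailing ','
+    # is left for enum_specifier to consume.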
+ def enumerator_list(self):
+
+ localctx = CParser.Enumerator_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 36, self.RULE_enumerator_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 319
+ self.enumerator()
+ self.state = 324
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 320
+ self.match(CParser.T__3)
+ self.state = 321
+ self.enumerator()
+ self.state = 326
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class EnumeratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_enumerator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEnumerator" ):
+ listener.enterEnumerator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEnumerator" ):
+ listener.exitEnumerator(self)
+
+
+
+
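+    # IDENTIFIER optionally followed by T__4 (likely '=') and a constant_expression.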
+ def enumerator(self):
+
+ localctx = CParser.EnumeratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 38, self.RULE_enumerator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 327
+ self.match(CParser.IDENTIFIER)
+ self.state = 330
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__4:
+ self.state = 328
+ self.match(CParser.T__4)
+ self.state = 329
+ self.constant_expression()
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_qualifierContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_qualifier
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_qualifier" ):
+ listener.enterType_qualifier(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_qualifier" ):
+ listener.exitType_qualifier(self)
+
+
+
+
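+    # A single token from T__24 .. T__36. In EDK2's C grammar this set covers 'const' and
+    # 'volatile' plus firmware-specific qualifiers, though the literals are not shown in this hunk.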
+ def type_qualifier(self):
+
+ localctx = CParser.Type_qualifierContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 40, self.RULE_type_qualifier)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 332
+ _la = self._input.LA(1)
+            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & (
+                    (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) |
+                    (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) |
+                    (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) |
+                    (1 << CParser.T__36))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class DeclaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def direct_declarator(self):
+ return self.getTypedRuleContext(CParser.Direct_declaratorContext,0)
+
+
+ def pointer(self):
+ return self.getTypedRuleContext(CParser.PointerContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclarator" ):
+ listener.enterDeclarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclarator" ):
+ listener.exitDeclarator(self)
+
+
+
+
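+    # Alt 1: an optional pointer and optional T__33/T__34/T__35 modifier tokens before a
+    # direct_declarator; alt 2: a lone pointer.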
+ def declarator(self):
+
+ localctx = CParser.DeclaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 42, self.RULE_declarator)
+ self._la = 0 # Token type
+ try:
+ self.state = 348
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 335
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__41:
+ self.state = 334
+ self.pointer()
+
+
+ self.state = 338
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__33:
+ self.state = 337
+ self.match(CParser.T__33)
+
+
+ self.state = 341
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__34:
+ self.state = 340
+ self.match(CParser.T__34)
+
+
+ self.state = 344
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__35:
+ self.state = 343
+ self.match(CParser.T__35)
+
+
+ self.state = 346
+ self.direct_declarator()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 347
+ self.pointer()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Direct_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ # @param i=None Type: int
+ def declarator_suffix(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Declarator_suffixContext)
+ else:
+ return self.getTypedRuleContext(CParser.Declarator_suffixContext,i)
+
+
+ def declarator(self):
+ return self.getTypedRuleContext(CParser.DeclaratorContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_direct_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDirect_declarator" ):
+ listener.enterDirect_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDirect_declarator" ):
+ listener.exitDirect_declarator(self)
+
+
+
+
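+    # Either IDENTIFIER declarator_suffix*, or a T__37 .. T__38 pair (apparently '(' and ')')
+    # wrapping a nested declarator that must be followed by at least one suffix.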
+ def direct_declarator(self):
+
+ localctx = CParser.Direct_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 44, self.RULE_direct_declarator)
+ try:
+ self.state = 368
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 350
+ self.match(CParser.IDENTIFIER)
+ self.state = 354
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 351
+ self.declarator_suffix()
+ self.state = 356
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
+
+ pass
+ elif token in [CParser.T__37]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 357
+ self.match(CParser.T__37)
+ self.state = 359
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,36,self._ctx)
+ if la_ == 1:
+ self.state = 358
+ self.match(CParser.T__33)
+
+
+ self.state = 361
+ self.declarator()
+ self.state = 362
+ self.match(CParser.T__38)
+ self.state = 364
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 363
+ self.declarator_suffix()
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 366
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,37,self._ctx)
+
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Declarator_suffixContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def parameter_type_list(self):
+ return self.getTypedRuleContext(CParser.Parameter_type_listContext,0)
+
+
+ def identifier_list(self):
+ return self.getTypedRuleContext(CParser.Identifier_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_declarator_suffix
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDeclarator_suffix" ):
+ listener.enterDeclarator_suffix(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDeclarator_suffix" ):
+ listener.exitDeclarator_suffix(self)
+
+
+
+
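+    # Five suffix forms: [constant_expression], [], (parameter_type_list), (identifier_list),
+    # or (); T__39/T__40 appear to be '['/']' and T__37/T__38 '('/')'.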
+ def declarator_suffix(self):
+
+ localctx = CParser.Declarator_suffixContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 46, self.RULE_declarator_suffix)
+ try:
+ self.state = 386
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,39,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 370
+ self.match(CParser.T__39)
+ self.state = 371
+ self.constant_expression()
+ self.state = 372
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 374
+ self.match(CParser.T__39)
+ self.state = 375
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 376
+ self.match(CParser.T__37)
+ self.state = 377
+ self.parameter_type_list()
+ self.state = 378
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 380
+ self.match(CParser.T__37)
+ self.state = 381
+ self.identifier_list()
+ self.state = 382
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 384
+ self.match(CParser.T__37)
+ self.state = 385
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class PointerContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def type_qualifier(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Type_qualifierContext)
+ else:
+ return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
+
+
+ def pointer(self):
+ return self.getTypedRuleContext(CParser.PointerContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_pointer
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterPointer" ):
+ listener.enterPointer(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitPointer" ):
+ listener.exitPointer(self)
+
+
+
+
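+    # T__41 (evidently '*') followed by type_qualifier+ and an optional pointer,
+    # or by another pointer, or standing alone.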
+ def pointer(self):
+
+ localctx = CParser.PointerContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 48, self.RULE_pointer)
+ try:
+ self.state = 400
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 388
+ self.match(CParser.T__41)
+ self.state = 390
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 389
+ self.type_qualifier()
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 392
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,40,self._ctx)
+
+ self.state = 395
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
+ if la_ == 1:
+ self.state = 394
+ self.pointer()
+
+
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 397
+ self.match(CParser.T__41)
+ self.state = 398
+ self.pointer()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 399
+ self.match(CParser.T__41)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Parameter_type_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def parameter_list(self):
+ return self.getTypedRuleContext(CParser.Parameter_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_parameter_type_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterParameter_type_list" ):
+ listener.enterParameter_type_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitParameter_type_list" ):
+ listener.exitParameter_type_list(self)
+
+
+
+
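+    # parameter_list optionally extended by T__3 T__28? T__42; T__42 is plausibly the '...'
+    # ellipsis, and T__28 an EDK2-specific qualifier such as OPTIONAL (an assumption, not visible here).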
+ def parameter_type_list(self):
+
+ localctx = CParser.Parameter_type_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 50, self.RULE_parameter_type_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 402
+ self.parameter_list()
+ self.state = 408
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 403
+ self.match(CParser.T__3)
+ self.state = 405
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 404
+ self.match(CParser.T__28)
+
+
+ self.state = 407
+ self.match(CParser.T__42)
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Parameter_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def parameter_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_parameter_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterParameter_list" ):
+ listener.enterParameter_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitParameter_list" ):
+ listener.exitParameter_list(self)
+
+
+
+
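+    # parameter_declaration (T__3 T__28? parameter_declaration)*, mirroring parameter_type_list above.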
+ def parameter_list(self):
+
+ localctx = CParser.Parameter_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 52, self.RULE_parameter_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 410
+ self.parameter_declaration()
+ self.state = 418
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 411
+ self.match(CParser.T__3)
+ self.state = 413
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,45,self._ctx)
+ if la_ == 1:
+ self.state = 412
+ self.match(CParser.T__28)
+
+
+ self.state = 415
+ self.parameter_declaration()
+ self.state = 420
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Parameter_declarationContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def declaration_specifiers(self):
+ return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
+
+
+ # @param i=None Type: int
+ def declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclaratorContext,i)
+
+
+ # @param i=None Type: int
+ def abstract_declarator(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Abstract_declaratorContext)
+ else:
+ return self.getTypedRuleContext(CParser.Abstract_declaratorContext,i)
+
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ # @param i=None Type: int
+ def pointer(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.PointerContext)
+ else:
+ return self.getTypedRuleContext(CParser.PointerContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_parameter_declaration
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterParameter_declaration" ):
+ listener.enterParameter_declaration(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitParameter_declaration" ):
+ listener.exitParameter_declaration(self)
+
+
+
+
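+    # Alt 1: declaration_specifiers, then any mix of declarator / abstract_declarator
+    # occurrences and an optional T__28; alt 2: pointer* IDENTIFIER.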
+ def parameter_declaration(self):
+
+ localctx = CParser.Parameter_declarationContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 54, self.RULE_parameter_declaration)
+ self._la = 0 # Token type
+ try:
+ self.state = 439
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,51,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 421
+ self.declaration_specifiers()
+ self.state = 426
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__39 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
+ self.state = 424
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,47,self._ctx)
+ if la_ == 1:
+ self.state = 422
+ self.declarator()
+ pass
+
+ elif la_ == 2:
+ self.state = 423
+ self.abstract_declarator()
+ pass
+
+
+ self.state = 428
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 430
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 429
+ self.match(CParser.T__28)
+
+
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 435
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__41:
+ self.state = 432
+ self.pointer()
+ self.state = 437
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 438
+ self.match(CParser.IDENTIFIER)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Identifier_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def IDENTIFIER(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.IDENTIFIER)
+ else:
+ return self.getToken(CParser.IDENTIFIER, i)
+
+ def getRuleIndex(self):
+ return CParser.RULE_identifier_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterIdentifier_list" ):
+ listener.enterIdentifier_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitIdentifier_list" ):
+ listener.exitIdentifier_list(self)
+
+
+
+
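+    # IDENTIFIER (T__3 IDENTIFIER)*: an old-style (K&R) parameter name list.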
+ def identifier_list(self):
+
+ localctx = CParser.Identifier_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 56, self.RULE_identifier_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 441
+ self.match(CParser.IDENTIFIER)
+ self.state = 446
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 442
+ self.match(CParser.T__3)
+ self.state = 443
+ self.match(CParser.IDENTIFIER)
+ self.state = 448
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Type_nameContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def specifier_qualifier_list(self):
+ return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)
+
+
+ def abstract_declarator(self):
+ return self.getTypedRuleContext(CParser.Abstract_declaratorContext,0)
+
+
+ def type_id(self):
+ return self.getTypedRuleContext(CParser.Type_idContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_type_name
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterType_name" ):
+ listener.enterType_name(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitType_name" ):
+ listener.exitType_name(self)
+
+
+
+
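+    # specifier_qualifier_list with an optional abstract_declarator, or a plain type_id.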
+ def type_name(self):
+
+ localctx = CParser.Type_nameContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 58, self.RULE_type_name)
+ self._la = 0 # Token type
+ try:
+ self.state = 454
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,54,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 449
+ self.specifier_qualifier_list()
+ self.state = 451
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__37) | (1 << CParser.T__39) | (1 << CParser.T__41))) != 0):
+ self.state = 450
+ self.abstract_declarator()
+
+
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 453
+ self.type_id()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Abstract_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def pointer(self):
+ return self.getTypedRuleContext(CParser.PointerContext,0)
+
+
+ def direct_abstract_declarator(self):
+ return self.getTypedRuleContext(CParser.Direct_abstract_declaratorContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_abstract_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAbstract_declarator" ):
+ listener.enterAbstract_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAbstract_declarator" ):
+ listener.exitAbstract_declarator(self)
+
+
+
+
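+    # A pointer followed by an optional direct_abstract_declarator, or a direct_abstract_declarator alone.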
+ def abstract_declarator(self):
+
+ localctx = CParser.Abstract_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 60, self.RULE_abstract_declarator)
+ try:
+ self.state = 461
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__41]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 456
+ self.pointer()
+ self.state = 458
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,55,self._ctx)
+ if la_ == 1:
+ self.state = 457
+ self.direct_abstract_declarator()
+
+
+ pass
+ elif token in [CParser.T__37, CParser.T__39]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 460
+ self.direct_abstract_declarator()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Direct_abstract_declaratorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def abstract_declarator(self):
+ return self.getTypedRuleContext(CParser.Abstract_declaratorContext,0)
+
+
+ # @param i=None Type: int
+ def abstract_declarator_suffix(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Abstract_declarator_suffixContext)
+ else:
+ return self.getTypedRuleContext(CParser.Abstract_declarator_suffixContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_direct_abstract_declarator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterDirect_abstract_declarator" ):
+ listener.enterDirect_abstract_declarator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitDirect_abstract_declarator" ):
+ listener.exitDirect_abstract_declarator(self)
+
+
+
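+    # A parenthesized abstract_declarator or an initial abstract_declarator_suffix,
+    # followed by any number of further suffixes.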
+ def direct_abstract_declarator(self):
+
+ localctx = CParser.Direct_abstract_declaratorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 62, self.RULE_direct_abstract_declarator)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 468
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
+ if la_ == 1:
+ self.state = 463
+ self.match(CParser.T__37)
+ self.state = 464
+ self.abstract_declarator()
+ self.state = 465
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 2:
+ self.state = 467
+ self.abstract_declarator_suffix()
+ pass
+
+
+ self.state = 473
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 470
+ self.abstract_declarator_suffix()
+ self.state = 475
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Abstract_declarator_suffixContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def parameter_type_list(self):
+ return self.getTypedRuleContext(CParser.Parameter_type_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_abstract_declarator_suffix
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAbstract_declarator_suffix" ):
+ listener.enterAbstract_declarator_suffix(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAbstract_declarator_suffix" ):
+ listener.exitAbstract_declarator_suffix(self)
+
+
+
+
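+    # Four forms: [], [constant_expression], (), or (parameter_type_list).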
+ def abstract_declarator_suffix(self):
+
+ localctx = CParser.Abstract_declarator_suffixContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 64, self.RULE_abstract_declarator_suffix)
+ try:
+ self.state = 488
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,59,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 476
+ self.match(CParser.T__39)
+ self.state = 477
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 478
+ self.match(CParser.T__39)
+ self.state = 479
+ self.constant_expression()
+ self.state = 480
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 482
+ self.match(CParser.T__37)
+ self.state = 483
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 484
+ self.match(CParser.T__37)
+ self.state = 485
+ self.parameter_type_list()
+ self.state = 486
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class InitializerContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def assignment_expression(self):
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)
+
+
+ def initializer_list(self):
+ return self.getTypedRuleContext(CParser.Initializer_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_initializer
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInitializer" ):
+ listener.enterInitializer(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInitializer" ):
+ listener.exitInitializer(self)
+
+
+
+
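+    # Either an assignment_expression, or a braced initializer_list with an optional
+    # trailing T__3 (',') before the closing T__19.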
+ def initializer(self):
+
+ localctx = CParser.InitializerContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 66, self.RULE_initializer)
+ self._la = 0 # Token type
+ try:
+ self.state = 498
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 490
+ self.assignment_expression()
+ pass
+ elif token in [CParser.T__0]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 491
+ self.match(CParser.T__0)
+ self.state = 492
+ self.initializer_list()
+ self.state = 494
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__3:
+ self.state = 493
+ self.match(CParser.T__3)
+
+
+ self.state = 496
+ self.match(CParser.T__19)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Initializer_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def initializer(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.InitializerContext)
+ else:
+ return self.getTypedRuleContext(CParser.InitializerContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_initializer_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInitializer_list" ):
+ listener.enterInitializer_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInitializer_list" ):
+ listener.exitInitializer_list(self)
+
+
+
+
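+    # initializer (T__3 initializer)*; adaptivePredict leaves a trailing ',' for the
+    # enclosing initializer rule to consume.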
+ def initializer_list(self):
+
+ localctx = CParser.Initializer_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 68, self.RULE_initializer_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 500
+ self.initializer()
+ self.state = 505
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 501
+ self.match(CParser.T__3)
+ self.state = 502
+ self.initializer()
+ self.state = 507
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Argument_expression_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def assignment_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_argument_expression_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterArgument_expression_list" ):
+ listener.enterArgument_expression_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitArgument_expression_list" ):
+ listener.exitArgument_expression_list(self)
+
+
+
+
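+    # assignment_expression T__28? (T__3 assignment_expression T__28?)*; the recurring
+    # optional T__28 again suggests an EDK2 qualifier token (an inference, not confirmed here).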
+ def argument_expression_list(self):
+
+ localctx = CParser.Argument_expression_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 70, self.RULE_argument_expression_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 508
+ self.assignment_expression()
+ self.state = 510
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 509
+ self.match(CParser.T__28)
+
+
+ self.state = 519
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 512
+ self.match(CParser.T__3)
+ self.state = 513
+ self.assignment_expression()
+ self.state = 515
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__28:
+ self.state = 514
+ self.match(CParser.T__28)
+
+
+ self.state = 521
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Additive_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def multiplicative_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Multiplicative_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Multiplicative_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_additive_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAdditive_expression" ):
+ listener.enterAdditive_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAdditive_expression" ):
+ listener.exitAdditive_expression(self)
+
+
+
+
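+ # Rule sketch (inferred; T__43/T__44 are the '+'/'-' literals):
+ #   additive_expression : multiplicative_expression
+ #                         ( ( '+' | '-' ) multiplicative_expression )*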
+ def additive_expression(self):
+
+ localctx = CParser.Additive_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 72, self.RULE_additive_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 522
+ self.multiplicative_expression()
+ self.state = 529
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__43 or _la==CParser.T__44:
+ self.state = 527
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__43]:
+ self.state = 523
+ self.match(CParser.T__43)
+ self.state = 524
+ self.multiplicative_expression()
+ pass
+ elif token in [CParser.T__44]:
+ self.state = 525
+ self.match(CParser.T__44)
+ self.state = 526
+ self.multiplicative_expression()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ self.state = 531
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Multiplicative_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def cast_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Cast_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Cast_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_multiplicative_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterMultiplicative_expression" ):
+ listener.enterMultiplicative_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitMultiplicative_expression" ):
+ listener.exitMultiplicative_expression(self)
+
+
+
+
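+ # Rule sketch (inferred; T__41/T__45/T__46 are '*', '/', '%'):
+ #   multiplicative_expression : cast_expression
+ #                               ( ( '*' | '/' | '%' ) cast_expression )*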
+ def multiplicative_expression(self):
+
+ localctx = CParser.Multiplicative_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 74, self.RULE_multiplicative_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 532
+ self.cast_expression()
+ self.state = 541
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__45) | (1 << CParser.T__46))) != 0):
+ self.state = 539
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__41]:
+ self.state = 533
+ self.match(CParser.T__41)
+ self.state = 534
+ self.cast_expression()
+ pass
+ elif token in [CParser.T__45]:
+ self.state = 535
+ self.match(CParser.T__45)
+ self.state = 536
+ self.cast_expression()
+ pass
+ elif token in [CParser.T__46]:
+ self.state = 537
+ self.match(CParser.T__46)
+ self.state = 538
+ self.cast_expression()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ self.state = 543
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Cast_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def type_name(self):
+ return self.getTypedRuleContext(CParser.Type_nameContext,0)
+
+
+ def cast_expression(self):
+ return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
+
+
+ def unary_expression(self):
+ return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_cast_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterCast_expression" ):
+ listener.enterCast_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitCast_expression" ):
+ listener.exitCast_expression(self)
+
+
+
+
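+ # Rule sketch (inferred; T__37/T__38 are '(' and ')'; adaptivePredict
+ # decides whether the parenthesized form is a cast):
+ #   cast_expression : '(' type_name ')' cast_expression
+ #                   | unary_expression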
+ def cast_expression(self):
+
+ localctx = CParser.Cast_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 76, self.RULE_cast_expression)
+ try:
+ self.state = 550
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,70,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 544
+ self.match(CParser.T__37)
+ self.state = 545
+ self.type_name()
+ self.state = 546
+ self.match(CParser.T__38)
+ self.state = 547
+ self.cast_expression()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 549
+ self.unary_expression()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Unary_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def postfix_expression(self):
+ return self.getTypedRuleContext(CParser.Postfix_expressionContext,0)
+
+
+ def unary_expression(self):
+ return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
+
+
+ def unary_operator(self):
+ return self.getTypedRuleContext(CParser.Unary_operatorContext,0)
+
+
+ def cast_expression(self):
+ return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
+
+
+ def type_name(self):
+ return self.getTypedRuleContext(CParser.Type_nameContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_unary_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterUnary_expression" ):
+ listener.enterUnary_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitUnary_expression" ):
+ listener.exitUnary_expression(self)
+
+
+
+
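+ # Rule sketch (inferred; T__47/T__48 are '++'/'--' and T__49 is 'sizeof'):
+ #   unary_expression : postfix_expression
+ #                    | '++' unary_expression
+ #                    | '--' unary_expression
+ #                    | unary_operator cast_expression
+ #                    | 'sizeof' unary_expression
+ #                    | 'sizeof' '(' type_name ')'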
+ def unary_expression(self):
+
+ localctx = CParser.Unary_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 78, self.RULE_unary_expression)
+ try:
+ self.state = 567
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,71,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 552
+ self.postfix_expression()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 553
+ self.match(CParser.T__47)
+ self.state = 554
+ self.unary_expression()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 555
+ self.match(CParser.T__48)
+ self.state = 556
+ self.unary_expression()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 557
+ self.unary_operator()
+ self.state = 558
+ self.cast_expression()
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 560
+ self.match(CParser.T__49)
+ self.state = 561
+ self.unary_expression()
+ pass
+
+ elif la_ == 6:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 562
+ self.match(CParser.T__49)
+ self.state = 563
+ self.match(CParser.T__37)
+ self.state = 564
+ self.type_name()
+ self.state = 565
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Postfix_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.FuncCallText = ''
+ self.p = None # Primary_expressionContext
+ self.a = None # Token
+ self.c = None # Argument_expression_listContext
+ self.b = None # Token
+ self.x = None # Token
+ self.y = None # Token
+ self.z = None # Token
+
+ def primary_expression(self):
+ return self.getTypedRuleContext(CParser.Primary_expressionContext,0)
+
+
+ # @param i=None Type: int
+ def expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.ExpressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.ExpressionContext,i)
+
+
+ # @param i=None Type: int
+ def macro_parameter_list(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Macro_parameter_listContext)
+ else:
+ return self.getTypedRuleContext(CParser.Macro_parameter_listContext,i)
+
+
+ # @param i=None Type: int
+ def argument_expression_list(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Argument_expression_listContext)
+ else:
+ return self.getTypedRuleContext(CParser.Argument_expression_listContext,i)
+
+
+ # @param i=None Type: int
+ def IDENTIFIER(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.IDENTIFIER)
+ else:
+ return self.getToken(CParser.IDENTIFIER, i)
+
+ def getRuleIndex(self):
+ return CParser.RULE_postfix_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterPostfix_expression" ):
+ listener.enterPostfix_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitPostfix_expression" ):
+ listener.exitPostfix_expression(self)
+
+
+
+
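+ # Rule sketch (inferred): a primary_expression followed by any number of
+ # suffixes: '[' expression ']', '(' ')', '(' argument_expression_list ')',
+ # '(' macro_parameter_list ')', '.' IDENTIFIER, '*' IDENTIFIER,
+ # '->' IDENTIFIER, '++' or '--'.  The embedded actions accumulate the
+ # dotted/arrow name into self.FuncCallText (the '*' IDENTIFIER suffix
+ # restarts it rather than appending, mirroring the grammar action) and
+ # report each completed call through self.StoreFunctionCalling(), the
+ # source-analysis hook defined elsewhere in this parser.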
+ def postfix_expression(self):
+
+ localctx = CParser.Postfix_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 80, self.RULE_postfix_expression)
+
+ self.FuncCallText=''
+
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 569
+ localctx.p = self.primary_expression()
+ self.FuncCallText += (None if localctx.p is None else self._input.getText((localctx.p.start,localctx.p.stop)))
+ self.state = 600
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 598
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,72,self._ctx)
+ if la_ == 1:
+ self.state = 571
+ self.match(CParser.T__39)
+ self.state = 572
+ self.expression()
+ self.state = 573
+ self.match(CParser.T__40)
+ pass
+
+ elif la_ == 2:
+ self.state = 575
+ self.match(CParser.T__37)
+ self.state = 576
+ localctx.a = self.match(CParser.T__38)
+ self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.a is None else localctx.a.line), localctx.a.column, self.FuncCallText, '')
+ pass
+
+ elif la_ == 3:
+ self.state = 578
+ self.match(CParser.T__37)
+ self.state = 579
+ localctx.c = self.argument_expression_list()
+ self.state = 580
+ localctx.b = self.match(CParser.T__38)
+ self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.b is None else localctx.b.line), localctx.b.column, self.FuncCallText, (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
+ pass
+
+ elif la_ == 4:
+ self.state = 583
+ self.match(CParser.T__37)
+ self.state = 584
+ self.macro_parameter_list()
+ self.state = 585
+ self.match(CParser.T__38)
+ pass
+
+ elif la_ == 5:
+ self.state = 587
+ self.match(CParser.T__50)
+ self.state = 588
+ localctx.x = self.match(CParser.IDENTIFIER)
+ self.FuncCallText += '.' + (None if localctx.x is None else localctx.x.text)
+ pass
+
+ elif la_ == 6:
+ self.state = 590
+ self.match(CParser.T__41)
+ self.state = 591
+ localctx.y = self.match(CParser.IDENTIFIER)
+ self.FuncCallText = (None if localctx.y is None else localctx.y.text)
+ pass
+
+ elif la_ == 7:
+ self.state = 593
+ self.match(CParser.T__51)
+ self.state = 594
+ localctx.z = self.match(CParser.IDENTIFIER)
+ self.FuncCallText += '->' + (None if localctx.z is None else localctx.z.text)
+ pass
+
+ elif la_ == 8:
+ self.state = 596
+ self.match(CParser.T__47)
+ pass
+
+ elif la_ == 9:
+ self.state = 597
+ self.match(CParser.T__48)
+ pass
+
+
+ self.state = 602
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Macro_parameter_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def parameter_declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_macro_parameter_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterMacro_parameter_list" ):
+ listener.enterMacro_parameter_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitMacro_parameter_list" ):
+ listener.exitMacro_parameter_list(self)
+
+
+
+
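+ # Rule sketch (inferred; apparently lets a call-like suffix parse macro
+ # arguments that look like declarations):
+ #   macro_parameter_list : parameter_declaration
+ #                          ( ',' parameter_declaration )*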
+ def macro_parameter_list(self):
+
+ localctx = CParser.Macro_parameter_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 82, self.RULE_macro_parameter_list)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 603
+ self.parameter_declaration()
+ self.state = 608
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 604
+ self.match(CParser.T__3)
+ self.state = 605
+ self.parameter_declaration()
+ self.state = 610
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Unary_operatorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_unary_operator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterUnary_operator" ):
+ listener.enterUnary_operator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitUnary_operator" ):
+ listener.exitUnary_operator(self)
+
+
+
+
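+ # Rule sketch (inferred): matches a single token from the C unary-operator
+ # set '&' '*' '+' '-' '~' '!' (tokens T__41, T__43, T__44, T__52, T__53,
+ # T__54), recovering inline on anything else.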
+ def unary_operator(self):
+
+ localctx = CParser.Unary_operatorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 84, self.RULE_unary_operator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 611
+ _la = self._input.LA(1)
+ if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Primary_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def constant(self):
+ return self.getTypedRuleContext(CParser.ConstantContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_primary_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterPrimary_expression" ):
+ listener.enterPrimary_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitPrimary_expression" ):
+ listener.exitPrimary_expression(self)
+
+
+
+
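+ # Rule sketch (inferred):
+ #   primary_expression : IDENTIFIER | constant | '(' expression ')'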
+ def primary_expression(self):
+
+ localctx = CParser.Primary_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 86, self.RULE_primary_expression)
+ try:
+ self.state = 619
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,75,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 613
+ self.match(CParser.IDENTIFIER)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 614
+ self.constant()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 615
+ self.match(CParser.T__37)
+ self.state = 616
+ self.expression()
+ self.state = 617
+ self.match(CParser.T__38)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class ConstantContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def HEX_LITERAL(self):
+ return self.getToken(CParser.HEX_LITERAL, 0)
+
+ def OCTAL_LITERAL(self):
+ return self.getToken(CParser.OCTAL_LITERAL, 0)
+
+ def DECIMAL_LITERAL(self):
+ return self.getToken(CParser.DECIMAL_LITERAL, 0)
+
+ def CHARACTER_LITERAL(self):
+ return self.getToken(CParser.CHARACTER_LITERAL, 0)
+
+ # @param i=None Type: int
+ def IDENTIFIER(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.IDENTIFIER)
+ else:
+ return self.getToken(CParser.IDENTIFIER, i)
+
+ # @param i=None Type: int
+ def STRING_LITERAL(self,i=None):
+ if i is None:
+ return self.getTokens(CParser.STRING_LITERAL)
+ else:
+ return self.getToken(CParser.STRING_LITERAL, i)
+
+ def FLOATING_POINT_LITERAL(self):
+ return self.getToken(CParser.FLOATING_POINT_LITERAL, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_constant
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterConstant" ):
+ listener.enterConstant(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitConstant" ):
+ listener.exitConstant(self)
+
+
+
+
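+ # Rule sketch (inferred): hex, octal, decimal, character and floating-point
+ # literals, plus adjacent string literals optionally interleaved with
+ # IDENTIFIERs (so macro-wrapped string concatenations still parse):
+ #   constant : HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL
+ #            | CHARACTER_LITERAL | FLOATING_POINT_LITERAL
+ #            | ( IDENTIFIER* STRING_LITERAL+ )+ IDENTIFIER*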
+ def constant(self):
+
+ localctx = CParser.ConstantContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 88, self.RULE_constant)
+ self._la = 0 # Token type
+ try:
+ self.state = 647
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.HEX_LITERAL]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 621
+ self.match(CParser.HEX_LITERAL)
+ pass
+ elif token in [CParser.OCTAL_LITERAL]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 622
+ self.match(CParser.OCTAL_LITERAL)
+ pass
+ elif token in [CParser.DECIMAL_LITERAL]:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 623
+ self.match(CParser.DECIMAL_LITERAL)
+ pass
+ elif token in [CParser.CHARACTER_LITERAL]:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 624
+ self.match(CParser.CHARACTER_LITERAL)
+ pass
+ elif token in [CParser.IDENTIFIER, CParser.STRING_LITERAL]:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 636
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 628
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.IDENTIFIER:
+ self.state = 625
+ self.match(CParser.IDENTIFIER)
+ self.state = 630
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 632
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 631
+ self.match(CParser.STRING_LITERAL)
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 634
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,77,self._ctx)
+
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 638
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,78,self._ctx)
+
+ self.state = 643
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.IDENTIFIER:
+ self.state = 640
+ self.match(CParser.IDENTIFIER)
+ self.state = 645
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ pass
+ elif token in [CParser.FLOATING_POINT_LITERAL]:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 646
+ self.match(CParser.FLOATING_POINT_LITERAL)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class ExpressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def assignment_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExpression" ):
+ listener.enterExpression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExpression" ):
+ listener.exitExpression(self)
+
+
+
+
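+ # Rule sketch (inferred; T__3 is ','):
+ #   expression : assignment_expression ( ',' assignment_expression )*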
+ def expression(self):
+
+ localctx = CParser.ExpressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 90, self.RULE_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 649
+ self.assignment_expression()
+ self.state = 654
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__3:
+ self.state = 650
+ self.match(CParser.T__3)
+ self.state = 651
+ self.assignment_expression()
+ self.state = 656
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Constant_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def conditional_expression(self):
+ return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_constant_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterConstant_expression" ):
+ listener.enterConstant_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitConstant_expression" ):
+ listener.exitConstant_expression(self)
+
+
+
+
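+ # Rule sketch (inferred):
+ #   constant_expression : conditional_expression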
+ def constant_expression(self):
+
+ localctx = CParser.Constant_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 92, self.RULE_constant_expression)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 657
+ self.conditional_expression()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Assignment_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def lvalue(self):
+ return self.getTypedRuleContext(CParser.LvalueContext,0)
+
+
+ def assignment_operator(self):
+ return self.getTypedRuleContext(CParser.Assignment_operatorContext,0)
+
+
+ def assignment_expression(self):
+ return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)
+
+
+ def conditional_expression(self):
+ return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_assignment_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAssignment_expression" ):
+ listener.enterAssignment_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAssignment_expression" ):
+ listener.exitAssignment_expression(self)
+
+
+
+
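+ # Rule sketch (inferred; adaptivePredict picks between the alternatives):
+ #   assignment_expression : lvalue assignment_operator assignment_expression
+ #                         | conditional_expression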
+ def assignment_expression(self):
+
+ localctx = CParser.Assignment_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 94, self.RULE_assignment_expression)
+ try:
+ self.state = 664
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,82,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 659
+ self.lvalue()
+ self.state = 660
+ self.assignment_operator()
+ self.state = 661
+ self.assignment_expression()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 663
+ self.conditional_expression()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class LvalueContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def unary_expression(self):
+ return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_lvalue
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLvalue" ):
+ listener.enterLvalue(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLvalue" ):
+ listener.exitLvalue(self)
+
+
+
+
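+ # Rule sketch (inferred):
+ #   lvalue : unary_expression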
+ def lvalue(self):
+
+ localctx = CParser.LvalueContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 96, self.RULE_lvalue)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 666
+ self.unary_expression()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Assignment_operatorContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_assignment_operator
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAssignment_operator" ):
+ listener.enterAssignment_operator(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAssignment_operator" ):
+ listener.exitAssignment_operator(self)
+
+
+
+
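+ # Rule sketch (inferred): matches one token from the set T__4 plus
+ # T__55..T__64, i.e. the eleven C assignment operators
+ # '=' '*=' '/=' '%=' '+=' '-=' '<<=' '>>=' '&=' '^=' '|='.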
+ def assignment_operator(self):
+
+ localctx = CParser.Assignment_operatorContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 98, self.RULE_assignment_operator)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 668
+ _la = self._input.LA(1)
+ if not(((((_la - 5)) & ~0x3f) == 0 and ((1 << (_la - 5)) & ((1 << (CParser.T__4 - 5)) | (1 << (CParser.T__55 - 5)) | (1 << (CParser.T__56 - 5)) | (1 << (CParser.T__57 - 5)) | (1 << (CParser.T__58 - 5)) | (1 << (CParser.T__59 - 5)) | (1 << (CParser.T__60 - 5)) | (1 << (CParser.T__61 - 5)) | (1 << (CParser.T__62 - 5)) | (1 << (CParser.T__63 - 5)) | (1 << (CParser.T__64 - 5)))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Conditional_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.e = None # Logical_or_expressionContext
+
+ def logical_or_expression(self):
+ return self.getTypedRuleContext(CParser.Logical_or_expressionContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def conditional_expression(self):
+ return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_conditional_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterConditional_expression" ):
+ listener.enterConditional_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitConditional_expression" ):
+ listener.exitConditional_expression(self)
+
+
+
+
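+ # Rule sketch (inferred; T__65/T__22 are '?' and ':'):
+ #   conditional_expression : e=logical_or_expression
+ #                            ( '?' expression ':' conditional_expression )?
+ # When the ternary branch is taken, the action reports the predicate's
+ # position and text through self.StorePredicateExpression().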
+ def conditional_expression(self):
+
+ localctx = CParser.Conditional_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 100, self.RULE_conditional_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 670
+ localctx.e = self.logical_or_expression()
+ self.state = 677
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__65:
+ self.state = 671
+ self.match(CParser.T__65)
+ self.state = 672
+ self.expression()
+ self.state = 673
+ self.match(CParser.T__22)
+ self.state = 674
+ self.conditional_expression()
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Logical_or_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def logical_and_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Logical_and_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Logical_and_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_logical_or_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLogical_or_expression" ):
+ listener.enterLogical_or_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLogical_or_expression" ):
+ listener.exitLogical_or_expression(self)
+
+
+
+
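+ # Rule sketch (inferred; T__66 is '||'):
+ #   logical_or_expression : logical_and_expression
+ #                           ( '||' logical_and_expression )*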
+ def logical_or_expression(self):
+
+ localctx = CParser.Logical_or_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 102, self.RULE_logical_or_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 679
+ self.logical_and_expression()
+ self.state = 684
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__66:
+ self.state = 680
+ self.match(CParser.T__66)
+ self.state = 681
+ self.logical_and_expression()
+ self.state = 686
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Logical_and_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def inclusive_or_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Inclusive_or_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Inclusive_or_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_logical_and_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLogical_and_expression" ):
+ listener.enterLogical_and_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLogical_and_expression" ):
+ listener.exitLogical_and_expression(self)
+
+
+
+
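+ # Rule sketch (inferred; T__67 is '&&'):
+ #   logical_and_expression : inclusive_or_expression
+ #                            ( '&&' inclusive_or_expression )*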
+ def logical_and_expression(self):
+
+ localctx = CParser.Logical_and_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 104, self.RULE_logical_and_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 687
+ self.inclusive_or_expression()
+ self.state = 692
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__67:
+ self.state = 688
+ self.match(CParser.T__67)
+ self.state = 689
+ self.inclusive_or_expression()
+ self.state = 694
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Inclusive_or_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def exclusive_or_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Exclusive_or_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Exclusive_or_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_inclusive_or_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterInclusive_or_expression" ):
+ listener.enterInclusive_or_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitInclusive_or_expression" ):
+ listener.exitInclusive_or_expression(self)
+
+
+
+
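+ # Rule sketch (inferred; T__68 is '|'):
+ #   inclusive_or_expression : exclusive_or_expression
+ #                             ( '|' exclusive_or_expression )*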
+ def inclusive_or_expression(self):
+
+ localctx = CParser.Inclusive_or_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 106, self.RULE_inclusive_or_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 695
+ self.exclusive_or_expression()
+ self.state = 700
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__68:
+ self.state = 696
+ self.match(CParser.T__68)
+ self.state = 697
+ self.exclusive_or_expression()
+ self.state = 702
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Exclusive_or_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def and_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.And_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.And_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_exclusive_or_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExclusive_or_expression" ):
+ listener.enterExclusive_or_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExclusive_or_expression" ):
+ listener.exitExclusive_or_expression(self)
+
+
+
+
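+ # Rule sketch (inferred; T__69 is '^'):
+ #   exclusive_or_expression : and_expression ( '^' and_expression )*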
+ def exclusive_or_expression(self):
+
+ localctx = CParser.Exclusive_or_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 108, self.RULE_exclusive_or_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 703
+ self.and_expression()
+ self.state = 708
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__69:
+ self.state = 704
+ self.match(CParser.T__69)
+ self.state = 705
+ self.and_expression()
+ self.state = 710
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class And_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def equality_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Equality_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Equality_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_and_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAnd_expression" ):
+ listener.enterAnd_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAnd_expression" ):
+ listener.exitAnd_expression(self)
+
+
+
+
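+ # Rule sketch (inferred; T__52 is '&'):
+ #   and_expression : equality_expression ( '&' equality_expression )*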
+ def and_expression(self):
+
+ localctx = CParser.And_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 110, self.RULE_and_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 711
+ self.equality_expression()
+ self.state = 716
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__52:
+ self.state = 712
+ self.match(CParser.T__52)
+ self.state = 713
+ self.equality_expression()
+ self.state = 718
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Equality_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def relational_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Relational_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Relational_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_equality_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterEquality_expression" ):
+ listener.enterEquality_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitEquality_expression" ):
+ listener.exitEquality_expression(self)
+
+
+
+
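+ # Rule sketch (inferred; T__70/T__71 are '=='/'!='):
+ #   equality_expression : relational_expression
+ #                         ( ( '==' | '!=' ) relational_expression )*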
+ def equality_expression(self):
+
+ localctx = CParser.Equality_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 112, self.RULE_equality_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 719
+ self.relational_expression()
+ self.state = 724
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__70 or _la==CParser.T__71:
+ self.state = 720
+ _la = self._input.LA(1)
+ if not(_la==CParser.T__70 or _la==CParser.T__71):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 721
+ self.relational_expression()
+ self.state = 726
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Relational_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def shift_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Shift_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Shift_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_relational_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterRelational_expression" ):
+ listener.enterRelational_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitRelational_expression" ):
+ listener.exitRelational_expression(self)
+
+
+
+
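+ # Rule sketch (inferred; T__72..T__75 cover '<', '>', '<=', '>='):
+ #   relational_expression : shift_expression
+ #                           ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*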
+ def relational_expression(self):
+
+ localctx = CParser.Relational_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 114, self.RULE_relational_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 727
+ self.shift_expression()
+ self.state = 732
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0):
+ self.state = 728
+ _la = self._input.LA(1)
+ if not(((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0)):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 729
+ self.shift_expression()
+ self.state = 734
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Shift_expressionContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def additive_expression(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.Additive_expressionContext)
+ else:
+ return self.getTypedRuleContext(CParser.Additive_expressionContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_shift_expression
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterShift_expression" ):
+ listener.enterShift_expression(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitShift_expression" ):
+ listener.exitShift_expression(self)
+
+
+
+
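+ # Rule sketch (inferred; T__76/T__77 are '<<'/'>>'):
+ #   shift_expression : additive_expression
+ #                      ( ( '<<' | '>>' ) additive_expression )*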
+ def shift_expression(self):
+
+ localctx = CParser.Shift_expressionContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 116, self.RULE_shift_expression)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 735
+ self.additive_expression()
+ self.state = 740
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while _la==CParser.T__76 or _la==CParser.T__77:
+ self.state = 736
+ _la = self._input.LA(1)
+ if not(_la==CParser.T__76 or _la==CParser.T__77):
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 737
+ self.additive_expression()
+ self.state = 742
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class StatementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def labeled_statement(self):
+ return self.getTypedRuleContext(CParser.Labeled_statementContext,0)
+
+
+ def compound_statement(self):
+ return self.getTypedRuleContext(CParser.Compound_statementContext,0)
+
+
+ def expression_statement(self):
+ return self.getTypedRuleContext(CParser.Expression_statementContext,0)
+
+
+ def selection_statement(self):
+ return self.getTypedRuleContext(CParser.Selection_statementContext,0)
+
+
+ def iteration_statement(self):
+ return self.getTypedRuleContext(CParser.Iteration_statementContext,0)
+
+
+ def jump_statement(self):
+ return self.getTypedRuleContext(CParser.Jump_statementContext,0)
+
+
+ def macro_statement(self):
+ return self.getTypedRuleContext(CParser.Macro_statementContext,0)
+
+
+ def asm2_statement(self):
+ return self.getTypedRuleContext(CParser.Asm2_statementContext,0)
+
+
+ def asm1_statement(self):
+ return self.getTypedRuleContext(CParser.Asm1_statementContext,0)
+
+
+ def asm_statement(self):
+ return self.getTypedRuleContext(CParser.Asm_statementContext,0)
+
+
+ def declaration(self):
+ return self.getTypedRuleContext(CParser.DeclarationContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStatement" ):
+ listener.enterStatement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStatement" ):
+ listener.exitStatement(self)
+
+
+
+
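+ # Rule sketch (inferred): one of the eleven statement forms below, chosen
+ # by adaptivePredict: labeled, compound, expression, selection, iteration,
+ # jump, macro, asm2, asm1, asm, or a declaration.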
+ def statement(self):
+
+ localctx = CParser.StatementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 118, self.RULE_statement)
+ try:
+ self.state = 754
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,92,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 743
+ self.labeled_statement()
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 744
+ self.compound_statement()
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 745
+ self.expression_statement()
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 746
+ self.selection_statement()
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 747
+ self.iteration_statement()
+ pass
+
+ elif la_ == 6:
+ self.enterOuterAlt(localctx, 6)
+ self.state = 748
+ self.jump_statement()
+ pass
+
+ elif la_ == 7:
+ self.enterOuterAlt(localctx, 7)
+ self.state = 749
+ self.macro_statement()
+ pass
+
+ elif la_ == 8:
+ self.enterOuterAlt(localctx, 8)
+ self.state = 750
+ self.asm2_statement()
+ pass
+
+ elif la_ == 9:
+ self.enterOuterAlt(localctx, 9)
+ self.state = 751
+ self.asm1_statement()
+ pass
+
+ elif la_ == 10:
+ self.enterOuterAlt(localctx, 10)
+ self.state = 752
+ self.asm_statement()
+ pass
+
+ elif la_ == 11:
+ self.enterOuterAlt(localctx, 11)
+ self.state = 753
+ self.declaration()
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Asm2_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def getRuleIndex(self):
+ return CParser.RULE_asm2_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAsm2_statement" ):
+ listener.enterAsm2_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAsm2_statement" ):
+ listener.exitAsm2_statement(self)
+
+
+
+
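+ # Rule sketch (inferred; T__78 looks like the '__asm__' keyword and T__1
+ # is ';'; the loop swallows every token except ';' between the parens):
+ #   asm2_statement : '__asm__'? IDENTIFIER '(' ( ~';' )* ')' ';'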
+ def asm2_statement(self):
+
+ localctx = CParser.Asm2_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 120, self.RULE_asm2_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 757
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if _la==CParser.T__78:
+ self.state = 756
+ self.match(CParser.T__78)
+
+
+ self.state = 759
+ self.match(CParser.IDENTIFIER)
+ self.state = 760
+ self.match(CParser.T__37)
+ self.state = 764
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 761
+ _la = self._input.LA(1)
+ if _la <= 0 or _la==CParser.T__1:
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 766
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
+
+ self.state = 767
+ self.match(CParser.T__38)
+ self.state = 768
+ self.match(CParser.T__1)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Asm1_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_asm1_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAsm1_statement" ):
+ listener.enterAsm1_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAsm1_statement" ):
+ listener.exitAsm1_statement(self)
+
+
+
+
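+ # Rule sketch (inferred; T__79 looks like the '_asm' keyword, with
+ # T__0/T__19 as '{' and '}'; the loop swallows every token except '}'):
+ #   asm1_statement : '_asm' '{' ( ~'}' )* '}'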
+ def asm1_statement(self):
+
+ localctx = CParser.Asm1_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 122, self.RULE_asm1_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 770
+ self.match(CParser.T__79)
+ self.state = 771
+ self.match(CParser.T__0)
+ self.state = 775
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
+ self.state = 772
+ _la = self._input.LA(1)
+ if _la <= 0 or _la==CParser.T__19:
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 777
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 778
+ self.match(CParser.T__19)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Asm_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_asm_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterAsm_statement" ):
+ listener.enterAsm_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitAsm_statement" ):
+ listener.exitAsm_statement(self)
+
+
+
+
+ def asm_statement(self):
+
+ localctx = CParser.Asm_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 124, self.RULE_asm_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 780
+ self.match(CParser.T__80)
+ self.state = 781
+ self.match(CParser.T__0)
+ self.state = 785
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
+ self.state = 782
+ _la = self._input.LA(1)
+ if _la <= 0 or _la==CParser.T__19:
+ self._errHandler.recoverInline(self)
+ else:
+ self._errHandler.reportMatch(self)
+ self.consume()
+ self.state = 787
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+
+ self.state = 788
+ self.match(CParser.T__19)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Macro_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def statement_list(self):
+ return self.getTypedRuleContext(CParser.Statement_listContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_macro_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterMacro_statement" ):
+ listener.enterMacro_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitMacro_statement" ):
+ listener.exitMacro_statement(self)
+
+
+
+
+ def macro_statement(self):
+
+ localctx = CParser.Macro_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 126, self.RULE_macro_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 790
+ self.match(CParser.IDENTIFIER)
+ self.state = 791
+ self.match(CParser.T__37)
+ self.state = 795
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 792
+ self.declaration()
+ self.state = 797
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
+
+ self.state = 799
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,98,self._ctx)
+ if la_ == 1:
+ self.state = 798
+ self.statement_list()
+
+
+ self.state = 802
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if ((((_la - 38)) & ~0x3f) == 0 and ((1 << (_la - 38)) & ((1 << (CParser.T__37 - 38)) | (1 << (CParser.T__41 - 38)) | (1 << (CParser.T__43 - 38)) | (1 << (CParser.T__44 - 38)) | (1 << (CParser.T__47 - 38)) | (1 << (CParser.T__48 - 38)) | (1 << (CParser.T__49 - 38)) | (1 << (CParser.T__52 - 38)) | (1 << (CParser.T__53 - 38)) | (1 << (CParser.T__54 - 38)) | (1 << (CParser.IDENTIFIER - 38)) | (1 << (CParser.CHARACTER_LITERAL - 38)) | (1 << (CParser.STRING_LITERAL - 38)) | (1 << (CParser.HEX_LITERAL - 38)) | (1 << (CParser.DECIMAL_LITERAL - 38)) | (1 << (CParser.OCTAL_LITERAL - 38)) | (1 << (CParser.FLOATING_POINT_LITERAL - 38)))) != 0):
+ self.state = 801
+ self.expression()
+
+
+ self.state = 804
+ self.match(CParser.T__38)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Labeled_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def statement(self):
+ return self.getTypedRuleContext(CParser.StatementContext,0)
+
+
+ def constant_expression(self):
+ return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_labeled_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterLabeled_statement" ):
+ listener.enterLabeled_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitLabeled_statement" ):
+ listener.exitLabeled_statement(self)
+
+
+
+
+ def labeled_statement(self):
+
+ localctx = CParser.Labeled_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 128, self.RULE_labeled_statement)
+ try:
+ self.state = 817
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.IDENTIFIER]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 806
+ self.match(CParser.IDENTIFIER)
+ self.state = 807
+ self.match(CParser.T__22)
+ self.state = 808
+ self.statement()
+ pass
+ elif token in [CParser.T__81]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 809
+ self.match(CParser.T__81)
+ self.state = 810
+ self.constant_expression()
+ self.state = 811
+ self.match(CParser.T__22)
+ self.state = 812
+ self.statement()
+ pass
+ elif token in [CParser.T__82]:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 814
+ self.match(CParser.T__82)
+ self.state = 815
+ self.match(CParser.T__22)
+ self.state = 816
+ self.statement()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Compound_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def declaration(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.DeclarationContext)
+ else:
+ return self.getTypedRuleContext(CParser.DeclarationContext,i)
+
+
+ def statement_list(self):
+ return self.getTypedRuleContext(CParser.Statement_listContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_compound_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterCompound_statement" ):
+ listener.enterCompound_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitCompound_statement" ):
+ listener.exitCompound_statement(self)
+
+
+
+
+ def compound_statement(self):
+
+ localctx = CParser.Compound_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 130, self.RULE_compound_statement)
+ self._la = 0 # Token type
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 819
+ self.match(CParser.T__0)
+ self.state = 823
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt==1:
+ self.state = 820
+ self.declaration()
+ self.state = 825
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
+
+ self.state = 827
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0) or ((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (CParser.T__78 - 79)) | (1 << (CParser.T__79 - 79)) | (1 << (CParser.T__80 - 79)) | (1 << (CParser.T__81 - 79)) | (1 << (CParser.T__82 - 79)) | (1 << (CParser.T__83 - 79)) | (1 << (CParser.T__85 - 79)) | (1 << (CParser.T__86 - 79)) | (1 << (CParser.T__87 - 79)) | (1 << (CParser.T__88 - 79)) | (1 << (CParser.T__89 - 79)) | (1 << (CParser.T__90 - 79)) | (1 << (CParser.T__91 - 79)) | (1 << (CParser.IDENTIFIER - 79)) | (1 << (CParser.CHARACTER_LITERAL - 79)) | (1 << (CParser.STRING_LITERAL - 79)) | (1 << (CParser.HEX_LITERAL - 79)) | (1 << (CParser.DECIMAL_LITERAL - 79)) | (1 << (CParser.OCTAL_LITERAL - 79)) | (1 << (CParser.FLOATING_POINT_LITERAL - 79)))) != 0):
+ self.state = 826
+ self.statement_list()
+
+
+ self.state = 829
+ self.match(CParser.T__19)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Statement_listContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ # @param i=None Type: int
+ def statement(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.StatementContext)
+ else:
+ return self.getTypedRuleContext(CParser.StatementContext,i)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_statement_list
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterStatement_list" ):
+ listener.enterStatement_list(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitStatement_list" ):
+ listener.exitStatement_list(self)
+
+
+
+
+ def statement_list(self):
+
+ localctx = CParser.Statement_listContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 132, self.RULE_statement_list)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 832
+ self._errHandler.sync(self)
+ _alt = 1
+ while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
+ if _alt == 1:
+ self.state = 831
+ self.statement()
+
+ else:
+ raise NoViableAltException(self)
+ self.state = 834
+ self._errHandler.sync(self)
+ _alt = self._interp.adaptivePredict(self._input,103,self._ctx)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Expression_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_expression_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterExpression_statement" ):
+ listener.enterExpression_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitExpression_statement" ):
+ listener.exitExpression_statement(self)
+
+
+
+
+ def expression_statement(self):
+
+ localctx = CParser.Expression_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 134, self.RULE_expression_statement)
+ try:
+ self.state = 840
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__1]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 836
+ self.match(CParser.T__1)
+ pass
+ elif token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 837
+ self.expression()
+ self.state = 838
+ self.match(CParser.T__1)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Selection_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.e = None # ExpressionContext
+
+ # @param i=None Type: int
+ def statement(self,i=None):
+ if i is None:
+ return self.getTypedRuleContexts(CParser.StatementContext)
+ else:
+ return self.getTypedRuleContext(CParser.StatementContext,i)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_selection_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterSelection_statement" ):
+ listener.enterSelection_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitSelection_statement" ):
+ listener.exitSelection_statement(self)
+
+
+
+
+ def selection_statement(self):
+
+ localctx = CParser.Selection_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 136, self.RULE_selection_statement)
+ try:
+ self.state = 858
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__83]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 842
+ self.match(CParser.T__83)
+ self.state = 843
+ self.match(CParser.T__37)
+ self.state = 844
+ localctx.e = self.expression()
+ self.state = 845
+ self.match(CParser.T__38)
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+ self.state = 847
+ self.statement()
+ self.state = 850
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,105,self._ctx)
+ if la_ == 1:
+ self.state = 848
+ self.match(CParser.T__84)
+ self.state = 849
+ self.statement()
+
+
+ pass
+ elif token in [CParser.T__85]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 852
+ self.match(CParser.T__85)
+ self.state = 853
+ self.match(CParser.T__37)
+ self.state = 854
+ self.expression()
+ self.state = 855
+ self.match(CParser.T__38)
+ self.state = 856
+ self.statement()
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Iteration_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.e = None # ExpressionContext
+
+ def statement(self):
+ return self.getTypedRuleContext(CParser.StatementContext,0)
+
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_iteration_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterIteration_statement" ):
+ listener.enterIteration_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitIteration_statement" ):
+ listener.exitIteration_statement(self)
+
+
+
+
+ def iteration_statement(self):
+
+ localctx = CParser.Iteration_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 138, self.RULE_iteration_statement)
+ try:
+ self.state = 876
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [CParser.T__86]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 860
+ self.match(CParser.T__86)
+ self.state = 861
+ self.match(CParser.T__37)
+ self.state = 862
+ localctx.e = self.expression()
+ self.state = 863
+ self.match(CParser.T__38)
+ self.state = 864
+ self.statement()
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+ pass
+ elif token in [CParser.T__87]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 867
+ self.match(CParser.T__87)
+ self.state = 868
+ self.statement()
+ self.state = 869
+ self.match(CParser.T__86)
+ self.state = 870
+ self.match(CParser.T__37)
+ self.state = 871
+ localctx.e = self.expression()
+ self.state = 872
+ self.match(CParser.T__38)
+ self.state = 873
+ self.match(CParser.T__1)
+ self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+ class Jump_statementContext(ParserRuleContext):
+
+ # @param parent=None Type: ParserRuleContext
+ # @param invokingState=-1 Type: int
+ def __init__(self,parser,parent=None,invokingState=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+
+ def IDENTIFIER(self):
+ return self.getToken(CParser.IDENTIFIER, 0)
+
+ def expression(self):
+ return self.getTypedRuleContext(CParser.ExpressionContext,0)
+
+
+ def getRuleIndex(self):
+ return CParser.RULE_jump_statement
+
+ # @param listener Type: ParseTreeListener
+ def enterRule(self,listener):
+ if hasattr( listener, "enterJump_statement" ):
+ listener.enterJump_statement(self)
+
+ # @param listener Type: ParseTreeListener
+ def exitRule(self,listener):
+ if hasattr( listener, "exitJump_statement" ):
+ listener.exitJump_statement(self)
+
+
+
+
+ def jump_statement(self):
+
+ localctx = CParser.Jump_statementContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 140, self.RULE_jump_statement)
+ try:
+ self.state = 891
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,108,self._ctx)
+ if la_ == 1:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 878
+ self.match(CParser.T__88)
+ self.state = 879
+ self.match(CParser.IDENTIFIER)
+ self.state = 880
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 2:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 881
+ self.match(CParser.T__89)
+ self.state = 882
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 3:
+ self.enterOuterAlt(localctx, 3)
+ self.state = 883
+ self.match(CParser.T__90)
+ self.state = 884
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 4:
+ self.enterOuterAlt(localctx, 4)
+ self.state = 885
+ self.match(CParser.T__91)
+ self.state = 886
+ self.match(CParser.T__1)
+ pass
+
+ elif la_ == 5:
+ self.enterOuterAlt(localctx, 5)
+ self.state = 887
+ self.match(CParser.T__91)
+ self.state = 888
+ self.expression()
+ self.state = 889
+ self.match(CParser.T__1)
+ pass
+
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
+
+
+
+
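The generated rule methods above delegate to hand-written helpers such as StorePredicateExpression() to record the source span and text of each if/while/do-while condition. A rough sketch of what such a helper does, assuming (as in the CParser3 variant of this grammar) that it simply appends a PredicateExpression fragment to FileProfile's shared PredicateExpressionList:

    from Eot import FileProfile
    from Eot.CodeFragment import PredicateExpression

    def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
        # Record the condition text and its start/end positions so that later
        # passes can analyse the predicates found in a translation unit.
        PredObj = PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.PredicateExpressionList.append(PredObj)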
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CParser4/__init__.py
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragment.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragment.py
new file mode 100755
index 00000000..d421c029
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragment.py
@@ -0,0 +1,179 @@
+## @file
+# Fragment classes describing pieces of a source file
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+
+## The description of comment contents and start & end position
+#
+#
+class Comment :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The comment content
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ # @param CommentType The type of comment (T_COMMENT_TWO_SLASH or T_COMMENT_SLASH_STAR).
+ #
+ def __init__(self, Str, Begin, End, CommentType):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+ self.Type = CommentType
+
+## The description of preprocess directives and start & end position
+#
+#
+class PP_Directive :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The directive content
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of assignment expression and start & end position
+#
+#
+class AssignmentExpression :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Lvalue The left-hand side of the assignment
+ # @param Op The assignment operator
+ # @param Exp The right-hand side expression
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Lvalue, Op, Exp, Begin, End):
+ self.Name = Lvalue
+ self.Operator = Op
+ self.Value = Exp
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of predicate expression and start & end position
+#
+#
+class PredicateExpression :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The predicate expression content
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of function definition and start & end position
+#
+#
+class FunctionDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param ModifierStr The modifier (return type and qualifiers) string
+ # @param DeclStr The declarator string
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ # @param LBPos The left brace position tuple.
+ # @param NamePos The function name position tuple.
+ #
+ def __init__(self, ModifierStr, DeclStr, Begin, End, LBPos, NamePos):
+ self.Modifier = ModifierStr
+ self.Declarator = DeclStr
+ self.StartPos = Begin
+ self.EndPos = End
+ self.LeftBracePos = LBPos
+ self.NamePos = NamePos
+
+## The description of variable declaration and start & end position
+#
+#
+class VariableDeclaration :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param ModifierStr The modifier (type and qualifiers) string
+ # @param DeclStr The declarator string
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, ModifierStr, DeclStr, Begin, End):
+ self.Modifier = ModifierStr
+ self.Declarator = DeclStr
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of enum definition and start & end position
+#
+#
+class EnumerationDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The enumeration definition content
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of struct/union definition and start & end position
+#
+#
+class StructUnionDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The struct/union definition content
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Str, Begin, End):
+ self.Content = Str
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of 'Typedef' definition and start & end position
+#
+#
+class TypedefDefinition :
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FromStr The source type of the typedef
+ # @param ToStr The new type name introduced by the typedef
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, FromStr, ToStr, Begin, End):
+ self.FromType = FromStr
+ self.ToType = ToStr
+ self.StartPos = Begin
+ self.EndPos = End
+
+## The description of function calling definition and start & end position
+#
+#
+class FunctionCalling:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Name The name of the called function
+ # @param Param The parameter list of the call
+ # @param Begin The start position tuple.
+ # @param End The end position tuple.
+ #
+ def __init__(self, Name, Param, Begin, End):
+ self.FuncName = Name
+ self.ParamList = Param
+ self.StartPos = Begin
+ self.EndPos = End
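These classes are plain position-annotated records with no behaviour of their own. A brief usage sketch with hypothetical values (the literal 0 stands for T_COMMENT_TWO_SLASH, which CodeFragmentCollector.py defines):

    from Eot.CodeFragment import Comment, PP_Directive

    # A '//' comment on line 10, columns 1-20 (type 0 == two-slash comment).
    cmt = Comment('// init the table', (10, 1), (10, 20), 0)
    # A one-line preprocessor directive on line 12.
    pp = PP_Directive('#define MAX_NODE 8', (12, 1), (12, 19))

    print(cmt.Content, cmt.StartPos, cmt.EndPos, cmt.Type)
    print(pp.Content, pp.StartPos, pp.EndPos)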
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragmentCollector.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
new file mode 100755
index 00000000..e74bc1a8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
@@ -0,0 +1,435 @@
+## @file
+# Preprocess a source file before parsing
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import re
+import Common.LongFilePathOs as os
+import sys
+
+if sys.version_info.major == 3:
+ import antlr4 as antlr
+ from Eot.CParser4.CLexer import CLexer
+ from Eot.CParser4.CParser import CParser
+else:
+ import antlr3 as antlr
+ antlr.InputStream = antlr.StringStream
+ from Eot.CParser3.CLexer import CLexer
+ from Eot.CParser3.CParser import CParser
+
+from Eot import FileProfile
+from Eot.CodeFragment import PP_Directive
+from Eot.ParserWarning import Warning
+
+
+##define T_CHAR_SPACE ' '
+##define T_CHAR_NULL '\0'
+##define T_CHAR_CR '\r'
+##define T_CHAR_TAB '\t'
+##define T_CHAR_LF '\n'
+##define T_CHAR_SLASH '/'
+##define T_CHAR_BACKSLASH '\\'
+##define T_CHAR_DOUBLE_QUOTE '\"'
+##define T_CHAR_SINGLE_QUOTE '\''
+##define T_CHAR_STAR '*'
+##define T_CHAR_HASH '#'
+
+(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
+T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
+(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')
+
+SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
+
+(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)
+
+(T_PP_INCLUDE, T_PP_DEFINE, T_PP_OTHERS) = (0, 1, 2)
+
+## The collector for source code fragments.
+#
+# PreprocessFileWithClear() must run before parsing; ParseFile() invokes it itself.
+#
+# GetNext*** procedures fetch the next token first, then make a judgement.
+# Get*** procedures make a judgement on the current token only.
+#
+class CodeFragmentCollector:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.Profile = FileProfile.FileProfile(FileName)
+ self.Profile.FileLinesList.append(T_CHAR_LF)
+ self.FileName = FileName
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ self.__Token = ""
+ self.__SkippedChars = ""
+
+ ## __EndOfFile() method
+ #
+ # Check whether the current buffer position is at the end of the file
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at file end
+ # @retval False Current File buffer position is NOT at file end
+ #
+ def __EndOfFile(self):
+ NumberOfLines = len(self.Profile.FileLinesList)
+ SizeOfLastLine = len(self.Profile.FileLinesList[-1])
+ if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
+ return True
+ elif self.CurrentLineNumber > NumberOfLines:
+ return True
+ else:
+ return False
+
+ ## __EndOfLine() method
+ #
+ # Check whether the current buffer position is at the end of the line
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at line end
+ # @retval False Current File buffer position is NOT at line end
+ #
+ def __EndOfLine(self):
+ SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
+ return True
+ else:
+ return False
+
+ ## Rewind() method
+ #
+ # Reset file data buffer to the initial state
+ #
+ # @param self The object pointer
+ #
+ def Rewind(self):
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ ## __UndoOneChar() method
+ #
+ # Go back one char in the file buffer
+ #
+ # @param self The object pointer
+ # @retval True Successfully go back one char
+ # @retval False Not able to go back one char as file beginning reached
+ #
+ def __UndoOneChar(self):
+
+ if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
+ return False
+ elif self.CurrentOffsetWithinLine == 0:
+ self.CurrentLineNumber -= 1
+ self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
+ else:
+ self.CurrentOffsetWithinLine -= 1
+ return True
+
+ ## __GetOneChar() method
+ #
+ # Move forward one char in the file buffer
+ #
+ # @param self The object pointer
+ #
+ def __GetOneChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ else:
+ self.CurrentOffsetWithinLine += 1
+
+ ## __CurrentChar() method
+ #
+ # Get the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Current char
+ #
+ def __CurrentChar(self):
+ CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
+
+ return CurrentChar
+
+ ## __NextChar() method
+ #
+ # Get the char one past the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Next char
+ #
+ def __NextChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ return self.Profile.FileLinesList[self.CurrentLineNumber][0]
+ else:
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
+
+ ## __SetCurrentCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def __SetCurrentCharValue(self, Value):
+ self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
+
+ ## __SetCharValue() method
+ #
+ # Modify the value of the char at the given line and offset
+ #
+ # @param self The object pointer
+ # @param Line The (1-based) line number of the char to modify
+ # @param Offset The offset of the char within that line
+ # @param Value The new value of the char
+ #
+ def __SetCharValue(self, Line, Offset, Value):
+ self.Profile.FileLinesList[Line - 1][Offset] = Value
+
+ ## __CurrentLine() method
+ #
+ # Get the list that contains current line contents
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
+ def __CurrentLine(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+
+ ## __InsertComma() method
+ #
+ # Insert ',' where a preprocessor directive interrupts a list, so the
+ # surrounding code still parses after the directive is blanked out
+ #
+ # @param self The object pointer
+ # @param Line The (1-based) line number at which to insert
+ #
+ def __InsertComma(self, Line):
+
+
+ if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
+ BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
+ if BeforeHashPart.rstrip().endswith(',') or BeforeHashPart.rstrip().endswith(';'):
+ return
+
+ if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
+ return
+
+ if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
+ return
+
+ if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
+ return
+
+ self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
+
+ ## PreprocessFileWithClear() method
+ #
+ # Run a preprocess for the file to clean all comments
+ #
+ # @param self The object pointer
+ #
+ def PreprocessFileWithClear(self):
+
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ PPExtend = False
+ PPDirectiveObj = None
+ # HashComment in quoted string " " is ignored.
+ InString = False
+ InCharLiteral = False
+
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
+ while not self.__EndOfFile():
+
+ if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
+ InString = not InString
+
+ if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
+ InCharLiteral = not InCharLiteral
+ # a newline terminates '//' and '#' comments
+ if self.__CurrentChar() == T_CHAR_LF:
+ if HashComment and PPDirectiveObj is not None:
+ if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPExtend = True
+ else:
+ PPExtend = False
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+
+ if InComment and HashComment and not PPExtend:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+
+ if InString or InCharLiteral:
+ CurrentLine = "".join(self.__CurrentLine())
+ if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
+ self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
+
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
+
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ if HashComment:
+ # // follows hash PP directive
+ if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+ continue
+ else:
+ PPDirectiveObj.Content += self.__CurrentChar()
+
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ # check for // comment
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = True
+ DoubleSlashComment = True
+
+ # check for '#' comment
+ elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
+ InComment = True
+ HashComment = True
+ PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
+ # check for /* comment start
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
+
+ self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = True
+ else:
+ self.__GetOneChar()
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and HashComment and not PPExtend:
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ self.Rewind()
+
+ ## ParseFile() method
+ #
+ # Parse the file profile buffer to extract C code fragments.
+ # An exception is raised if a syntax error is found.
+ #
+ # @param self The object pointer
+ #
+ def ParseFile(self):
+ self.PreprocessFileWithClear()
+ # restore from ListOfList to ListOfString
+ self.Profile.FileLinesList = ["".join(line) for line in self.Profile.FileLinesList]
+ FileStringContents = ''
+ for fileLine in self.Profile.FileLinesList:
+ FileStringContents += fileLine
+ cStream = antlr.InputStream(FileStringContents)
+ lexer = CLexer(cStream)
+ tStream = antlr.CommonTokenStream(lexer)
+ parser = CParser(tStream)
+ parser.translation_unit()
+
+ ## CleanFileProfileBuffer() method
+ #
+ # Reset all contents of the profile of a file
+ #
+ def CleanFileProfileBuffer(self):
+
+ FileProfile.PPDirectiveList = []
+ FileProfile.AssignmentExpressionList = []
+ FileProfile.FunctionDefinitionList = []
+ FileProfile.VariableDeclarationList = []
+ FileProfile.EnumerationDefinitionList = []
+ FileProfile.StructUnionDefinitionList = []
+ FileProfile.TypedefDefinitionList = []
+ FileProfile.FunctionCallingList = []
+
+ ## PrintFragments() method
+ #
+ # Print the contents of the profile of a file
+ #
+ def PrintFragments(self):
+
+ print('################# ' + self.FileName + '#####################')
+
+ print('/****************************************/')
+ print('/************** ASSIGNMENTS *************/')
+ print('/****************************************/')
+ for assign in FileProfile.AssignmentExpressionList:
+ print(str(assign.StartPos) + assign.Name + assign.Operator + assign.Value)
+
+ print('/****************************************/')
+ print('/********* PREPROCESS DIRECTIVES ********/')
+ print('/****************************************/')
+ for pp in FileProfile.PPDirectiveList:
+ print(str(pp.StartPos) + pp.Content)
+
+ print('/****************************************/')
+ print('/********* VARIABLE DECLARATIONS ********/')
+ print('/****************************************/')
+ for var in FileProfile.VariableDeclarationList:
+ print(str(var.StartPos) + var.Modifier + ' '+ var.Declarator)
+
+ print('/****************************************/')
+ print('/********* FUNCTION DEFINITIONS *********/')
+ print('/****************************************/')
+ for func in FileProfile.FunctionDefinitionList:
+ print(str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos))
+
+ print('/****************************************/')
+ print('/************ ENUMERATIONS **************/')
+ print('/****************************************/')
+ for enum in FileProfile.EnumerationDefinitionList:
+ print(str(enum.StartPos) + enum.Content)
+
+ print('/****************************************/')
+ print('/*********** STRUCTS/UNIONS *************/')
+ print('/****************************************/')
+ for su in FileProfile.StructUnionDefinitionList:
+ print(str(su.StartPos) + su.Content)
+
+ print('/****************************************/')
+ print('/************** TYPEDEFS ****************/')
+ print('/****************************************/')
+ for typedef in FileProfile.TypedefDefinitionList:
+ print(str(typedef.StartPos) + typedef.ToType)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == "__main__":
+
+ print("For Test.")
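A minimal driver for this collector, assuming the Eot package is on the import path and Test.c is a small C file on disk (both hypothetical here):

    from Eot.CodeFragmentCollector import CodeFragmentCollector

    collector = CodeFragmentCollector('Test.c')  # hypothetical input file
    collector.ParseFile()        # blanks comments, then runs the ANTLR parser
    collector.PrintFragments()   # dumps whatever was gathered into FileProfile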
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Database.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Database.py
new file mode 100755
index 00000000..31163cbb
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Database.py
@@ -0,0 +1,249 @@
+## @file
+# This file is used to create the database used by the EOT tool
+#
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import Common.LongFilePathOs as os, time
+
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+
+from Table.TableDataModel import TableDataModel
+from Table.TableFile import TableFile
+from Table.TableFunction import TableFunction
+from Table.TableIdentifier import TableIdentifier
+from Table.TableEotReport import TableEotReport
+from Table.TableInf import TableInf
+from Table.TableDec import TableDec
+from Table.TableDsc import TableDsc
+from Table.TableFdf import TableFdf
+from Table.TableQuery import TableQuery
+
+##
+# Static definitions
+#
+DATABASE_PATH = "Eot.db"
+
+## Database class
+#
+# This class defines the EOT database.
+# During initialization, the database creates all tables and
+# inserts all records into the DataModel table.
+#
+class Database(object):
+ ## The constructor
+ #
+ # @param self: The object pointer
+ # @param DbPath: The file path of the database
+ #
+ def __init__(self, DbPath):
+ self.DbPath = DbPath
+ self.Conn = None
+ self.Cur = None
+ self.TblDataModel = None
+ self.TblFile = None
+ self.TblFunction = None
+ self.TblIdentifier = None
+ self.TblReport = None
+ self.TblInf = None
+ self.TblDec = None
+ self.TblDsc = None
+ self.TblFdf = None
+ self.TblQuery = None
+ self.TblQuery2 = None
+
+ ## InitDatabase() method
+ # 1. Delete all old existing tables
+ # 2. Create new tables
+ # 3. Initialize table DataModel
+ #
+ # @param self: The object pointer
+ # @param NewDatabase: Whether to create a new database from scratch
+ #
+ def InitDatabase(self, NewDatabase = True):
+ EdkLogger.verbose("\nInitialize EOT database started ...")
+ #
+ # Drop all old existing tables
+ #
+ if NewDatabase:
+ if os.path.exists(self.DbPath):
+ os.remove(self.DbPath)
+ self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
+ self.Conn.execute("PRAGMA page_size=8192")
+ self.Conn.execute("PRAGMA synchronous=OFF")
+ # to avoid non-ascii character conversion error
+ self.Conn.text_factory = str
+ self.Cur = self.Conn.cursor()
+
+ self.TblDataModel = TableDataModel(self.Cur)
+ self.TblFile = TableFile(self.Cur)
+ self.TblFunction = TableFunction(self.Cur)
+ self.TblIdentifier = TableIdentifier(self.Cur)
+ self.TblReport = TableEotReport(self.Cur)
+ self.TblInf = TableInf(self.Cur)
+ self.TblDec = TableDec(self.Cur)
+ self.TblDsc = TableDsc(self.Cur)
+ self.TblFdf = TableFdf(self.Cur)
+ self.TblQuery = TableQuery(self.Cur)
+ self.TblQuery2 = TableQuery(self.Cur)
+ self.TblQuery2.Table = 'Query2'
+
+ # Create new tables
+ if NewDatabase:
+ self.TblDataModel.Create()
+ self.TblFile.Create()
+ self.TblFunction.Create()
+ self.TblReport.Create()
+ self.TblInf.Create()
+ self.TblDec.Create()
+ self.TblDsc.Create()
+ self.TblFdf.Create()
+ self.TblQuery.Create()
+ self.TblQuery2.Create()
+
+ # Init each table's ID
+ self.TblDataModel.InitID()
+ self.TblFile.InitID()
+ self.TblFunction.InitID()
+ self.TblReport.InitID()
+ self.TblInf.InitID()
+ self.TblDec.InitID()
+ self.TblDsc.InitID()
+ self.TblFdf.InitID()
+ self.TblQuery.Drop()
+ self.TblQuery.Create()
+ self.TblQuery.InitID()
+ self.TblQuery2.Drop()
+ self.TblQuery2.Create()
+ self.TblQuery2.InitID()
+
+ # Initialize table DataModel
+ if NewDatabase:
+ self.TblDataModel.InitTable()
+
+ EdkLogger.verbose("Initialize EOT database ... DONE!")
+
+ ## QueryTable() method
+ #
+ # Query a table
+ #
+ # @param self: The object pointer
+ # @param Table: The instance of the table to be queried
+ #
+ def QueryTable(self, Table):
+ Table.Query()
+
+ ## Close() method
+ #
+ # Commit all first
+ # Close the connection and cursor
+ #
+ def Close(self):
+ # Commit to file
+ self.Conn.commit()
+
+ # Close connection and cursor
+ self.Cur.close()
+ self.Conn.close()
+
+ ## InsertOneFile() method
+ #
+ # Insert one file's information into the database
+ # 1. Create a record in TableFile
+ # 2. Create a per-file Identifier table
+ # 3. Insert each function, then the identifiers belonging to it
+ # 4. Insert the identifiers belonging to the file itself
+ #
+ # @param self: The object pointer
+ # @param File: The object of the file to be inserted
+ #
+ def InsertOneFile(self, File):
+ # Insert a record for file
+ FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
+ IdTable = TableIdentifier(self.Cur)
+ IdTable.Table = "Identifier%s" % FileID
+ IdTable.Create()
+
+ # Insert function of file
+ for Function in File.FunctionList:
+ FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
+ Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
+ Function.BodyStartLine, Function.BodyStartColumn, FileID, \
+ Function.FunNameStartLine, Function.FunNameStartColumn)
+
+ # Insert Identifier of function
+ for Identifier in Function.IdentifierList:
+ IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+ FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+ # Insert Identifier of file
+ for Identifier in File.IdentifierList:
+ IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+ FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+
+ EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)
+
+ ## UpdateIdentifierBelongsToFunction() method
+ #
+ # Update the field "BelongsToFunction" for each Identifier
+ #
+ # @param self: The object pointer
+ #
+ def UpdateIdentifierBelongsToFunction(self):
+ EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
+
+ SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
+ Records = self.TblFunction.Exec(SqlCommand)
+ Data1 = []
+ Data2 = []
+ for Record in Records:
+ FunctionID = Record[0]
+ BelongsToFile = Record[1]
+ StartLine = Record[2]
+ EndLine = Record[3]
+
+ SqlCommand = """Update Identifier%s set BelongsToFunction = %s where BelongsToFile = %s and StartLine > %s and EndLine < %s""" % \
+ (BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine)
+ self.TblIdentifier.Exec(SqlCommand)
+
+ SqlCommand = """Update Identifier%s set BelongsToFunction = %s, Model = %s where BelongsToFile = %s and Model = %s and EndLine = %s""" % \
+ (BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1)
+ self.TblIdentifier.Exec(SqlCommand)
+
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+ EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))
+
+ Db = Database(DATABASE_PATH)
+ Db.InitDatabase()
+ Db.QueryTable(Db.TblDataModel)
+
+ identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
+ identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
+ identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
+ identifier4 = DataClass.IdentifierClass(-1, '', '', "i1'", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 77, 43, 88, 43)
+ fun1 = DataClass.FunctionClass(-1, '', '', 'fun1', '', 21, 2, 60, 45, 1, 23, 0, [], [])
+ file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
+ Db.InsertOneFile(file)
+
+ Db.QueryTable(Db.TblFile)
+ Db.QueryTable(Db.TblFunction)
+ Db.QueryTable(Db.TblIdentifier)
+
+ Db.Close()
+ EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotGlobalData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotGlobalData.py
new file mode 100755
index 00000000..7173578d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotGlobalData.py
@@ -0,0 +1,105 @@
+## @file
+# This file is used to store global data for the EOT tool
+#
+# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from collections import OrderedDict
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+gEFI_SOURCE = ''
+gEDK_SOURCE = ''
+gWORKSPACE = ''
+gSHELL_INF = 'Application\\Shell'
+gMAKE_FILE = ''
+gDSC_FILE = ''
+gFV_FILE = []
+gFV = []
+gMAP_FILE = []
+gMap = {}
+
+
+gDb = ''
+gIdentifierTableList = []
+
+# Global macro
+gMACRO = {}
+gMACRO['EFI_SOURCE'] = gEFI_SOURCE
+gMACRO['EDK_SOURCE'] = gEDK_SOURCE
+gMACRO['SHELL_INF'] = gSHELL_INF
+gMACRO['CAPSULE_INF'] = ''
+
+# Log file for unmatched variables
+gUN_MATCHED_LOG = 'Log_UnMatched.log'
+gOP_UN_MATCHED = open(gUN_MATCHED_LOG, 'w+')
+
+# Log file for all INF files
+gINF_FILES = 'Log_Inf_File.log'
+gOP_INF = open(gINF_FILES, 'w+')
+
+# Log file for not dispatched PEIM/DRIVER
+gUN_DISPATCHED_LOG = 'Log_UnDispatched.log'
+gOP_UN_DISPATCHED = open(gUN_DISPATCHED_LOG, 'w+')
+
+# Log file for unmatched variables in function calling
+gUN_MATCHED_IN_LIBRARY_CALLING_LOG = 'Log_UnMatchedInLibraryCalling.log'
+gOP_UN_MATCHED_IN_LIBRARY_CALLING = open(gUN_MATCHED_IN_LIBRARY_CALLING_LOG, 'w+')
+
+# Log file for order of dispatched PEIM/DRIVER
+gDISPATCH_ORDER_LOG = 'Log_DispatchOrder.log'
+gOP_DISPATCH_ORDER = open(gDISPATCH_ORDER_LOG, 'w+')
+
+# Log file for found source files
+gSOURCE_FILES = 'Log_SourceFiles.log'
+gOP_SOURCE_FILES = open(gSOURCE_FILES, 'w+')
+
+# Dict for GUID found in DEC files
+gGuidDict = dict()
+
+# Dict for PROTOCOL
+gProtocolList = {}
+# Dict for PPI
+gPpiList = {}
+
+
+# Dict for consumed PPI function calling
+gConsumedPpiLibrary = OrderedDict()
+gConsumedPpiLibrary['EfiCommonLocateInterface'] = 0
+gConsumedPpiLibrary['PeiServicesLocatePpi'] = 0
+
+# Dict for produced PROTOCOL function calling
+gProducedProtocolLibrary = OrderedDict()
+gProducedProtocolLibrary['RegisterEsalClass'] = 0
+gProducedProtocolLibrary['CoreInstallProtocolInterface'] = 1
+gProducedProtocolLibrary['CoreInstallMultipleProtocolInterfaces'] = -1
+gProducedProtocolLibrary['EfiInstallProtocolInterface'] = 1
+gProducedProtocolLibrary['EfiReinstallProtocolInterface'] = 1
+gProducedProtocolLibrary['EfiLibNamedEventSignal'] = 0
+gProducedProtocolLibrary['LibInstallProtocolInterfaces'] = 1
+gProducedProtocolLibrary['LibReinstallProtocolInterfaces'] = 1
+
+# Dict for consumed PROTOCOL function calling
+gConsumedProtocolLibrary = OrderedDict()
+gConsumedProtocolLibrary['EfiHandleProtocol'] = 0
+gConsumedProtocolLibrary['EfiLocateProtocolHandleBuffers'] = 0
+gConsumedProtocolLibrary['EfiLocateProtocolInterface'] = 0
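+# NOTE: 'EfiHandleProtocol' was already assigned 0 above; this later entry (1) overwrites it.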
+gConsumedProtocolLibrary['EfiHandleProtocol'] = 1
+
+# Dict for callback PROTOCOL function calling
+gCallbackProtocolLibrary = OrderedDict()
+gCallbackProtocolLibrary['EfiRegisterProtocolCallback'] = 2
+
+gArchProtocolGuids = {'665e3ff6-46cc-11d4-9a38-0090273fc14d',
+ '26baccb1-6f42-11d4-bce7-0080c73c8881',
+ '26baccb2-6f42-11d4-bce7-0080c73c8881',
+ '1da97072-bddc-4b30-99f1-72a0b56fff2a',
+ '27cfac87-46cc-11d4-9a38-0090273fc14d',
+ '27cfac88-46cc-11d4-9a38-0090273fc14d',
+ 'b7dfb4e1-052f-449f-87be-9818fc91b733',
+ 'a46423e3-4617-49f1-b9ff-d1bfa9115839',
+ 'd2b2b828-0826-48a7-b3df-983c006024f0',
+ '26baccb3-6f42-11d4-bce7-0080c73c8881',
+ '1e5668e2-8481-11d4-bcf1-0080c73c8881',
+ '6441f818-6362-4e44-b570-7dba31dd2453',
+ '665e3ff5-46cc-11d4-9a38-0090273fc14d'}
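The integer stored against each routine in these dicts appears to identify which argument of a call carries the GUID of interest, with -1 marking a variable-argument installer. A lookup sketch under that assumption, with a hypothetical parameter list:

    from Eot import EotGlobalData

    def GuidArgOf(FuncName, ParamList):
        # Return the parameter expected to hold the protocol GUID, or None.
        Index = EotGlobalData.gProducedProtocolLibrary.get(FuncName)
        if Index is None or Index == -1:  # unknown routine, or varargs installer
            return None
        return ParamList[Index] if Index < len(ParamList) else None

    Args = ['Handle', '&gEfiFooProtocolGuid', 'EFI_NATIVE_INTERFACE', 'Interface']
    print(GuidArgOf('EfiInstallProtocolInterface', Args))  # -> '&gEfiFooProtocolGuid'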
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotMain.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotMain.py
new file mode 100755
index 00000000..b36a2488
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotMain.py
@@ -0,0 +1,1713 @@
+## @file
+# This file is the main entry point of the EOT tool
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os, time, glob
+import Common.EdkLogger as EdkLogger
+import Eot.EotGlobalData as EotGlobalData
+from optparse import OptionParser
+from Common.StringUtils import NormPath
+from Common import BuildToolError
+from Common.Misc import GuidStructureStringToGuidString
+from collections import OrderedDict as sdict
+from Eot.Parser import *
+from Eot.InfParserLite import EdkInfParser
+from Common.StringUtils import GetSplitValueList
+from Eot import c
+from Eot import Database
+from array import array
+from Eot.Report import Report
+from Common.BuildVersion import gBUILD_VERSION
+from Eot.Parser import ConvertGuid
+from Common.LongFilePathSupport import OpenLongFilePath as open
+import struct
+import uuid
+import copy
+import codecs
+from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID
+
+gGuidStringFormat = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X"
+gIndention = -4
+
+class Image(array):
+ _HEADER_ = struct.Struct("")
+ _HEADER_SIZE_ = _HEADER_.size
+
+ def __new__(cls, *args, **kwargs):
+ return array.__new__(cls, 'B')
+
+ def __init__(self, ID=None):
+ if ID is None:
+ self._ID_ = str(uuid.uuid1()).upper()
+ else:
+ self._ID_ = ID
+ self._BUF_ = None
+ self._LEN_ = None
+ self._OFF_ = None
+
+ self._SubImages = sdict() # {offset: Image()}
+
+ array.__init__(self)
+
+ def __repr__(self):
+ return self._ID_
+
+ def __len__(self):
+ Len = array.__len__(self)
+ for Offset in self._SubImages.keys():
+ Len += len(self._SubImages[Offset])
+ return Len
+
+ def _Unpack(self):
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
+ return len(self)
+
+ def _Pack(self, PadByte=0xFF):
+ raise NotImplementedError
+
+ def frombuffer(self, Buffer, Offset=0, Size=None):
+ self._BUF_ = Buffer
+ self._OFF_ = Offset
+ # we may need the Size information in advance if it's given
+ self._LEN_ = Size
+ self._LEN_ = self._Unpack()
+
+ def empty(self):
+ del self[0:]
+
+ def GetField(self, FieldStruct, Offset=0):
+ return FieldStruct.unpack_from(self, Offset)
+
+ def SetField(self, FieldStruct, Offset, *args):
+ # check if there's enough space
+ Size = FieldStruct.size
+ if Size > len(self):
+ self.extend([0] * (Size - len(self)))
+ FieldStruct.pack_into(self, Offset, *args)
+
+ def _SetData(self, Data):
+ if len(self) < self._HEADER_SIZE_:
+ self.extend([0] * (self._HEADER_SIZE_ - len(self)))
+ else:
+ del self[self._HEADER_SIZE_:]
+ self.extend(Data)
+
+ def _GetData(self):
+ if len(self) > self._HEADER_SIZE_:
+ return self[self._HEADER_SIZE_:]
+ return None
+
+ Data = property(_GetData, _SetData)
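+
+ # Usage sketch for the field accessors above (illustrative, not used
+ # elsewhere in this file):
+ #
+ #   Checksum = struct.Struct("1H")
+ #   Img = Image()
+ #   Img.SetField(Checksum, 0, 0x55AA)    # grows the buffer as needed
+ #   assert Img.GetField(Checksum, 0)[0] == 0x55AA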
+
+## CompressedImage() class
+#
+# A class for Compressed Image
+#
+class CompressedImage(Image):
+ # UncompressedLength = 4-byte
+ # CompressionType = 1-byte
+ _HEADER_ = struct.Struct("1I 1B")
+ _HEADER_SIZE_ = _HEADER_.size
+
+ _ORIG_SIZE_ = struct.Struct("1I")
+ _CMPRS_TYPE_ = struct.Struct("4x 1B")
+
+ def __init__(self, CompressedData=None, CompressionType=None, UncompressedLength=None):
+ Image.__init__(self)
+ if UncompressedLength is not None:
+ self.UncompressedLength = UncompressedLength
+ if CompressionType is not None:
+ self.CompressionType = CompressionType
+ if CompressedData is not None:
+ self.Data = CompressedData
+
+ def __str__(self):
+ global gIndention
+ S = "algorithm=%s uncompressed=%x" % (self.CompressionType, self.UncompressedLength)
+ for Sec in self.Sections:
+ S += '\n' + str(Sec)
+
+ return S
+
+ def _SetOriginalSize(self, Size):
+ self.SetField(self._ORIG_SIZE_, 0, Size)
+
+ def _GetOriginalSize(self):
+ return self.GetField(self._ORIG_SIZE_)[0]
+
+ def _SetCompressionType(self, Type):
+ self.SetField(self._CMPRS_TYPE_, 0, Type)
+
+ def _GetCompressionType(self):
+ return self.GetField(self._CMPRS_TYPE_)[0]
+
+ def _GetSections(self):
+ try:
+ TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:])
+ DecData = array('B')
+ DecData.frombytes(TmpData)  # array.fromstring() was removed in Python 3.9
+ except:
+ TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:])
+ DecData = array('B')
+ DecData.frombytes(TmpData)
+
+ SectionList = []
+ Offset = 0
+ while Offset < len(DecData):
+ Sec = Section()
+ try:
+ Sec.frombuffer(DecData, Offset)
+ Offset += Sec.Size
+ # the section is aligned to 4-byte boundary
+ except:
+ break
+ SectionList.append(Sec)
+ return SectionList
+
+ UncompressedLength = property(_GetOriginalSize, _SetOriginalSize)
+ CompressionType = property(_GetCompressionType, _SetCompressionType)
+ Sections = property(_GetSections)
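+
+ # Header sketch (illustrative, assuming a little-endian host): the payload
+ # starts with a 4-byte UncompressedLength followed by a 1-byte
+ # CompressionType, so:
+ #
+ #   Hdr = struct.pack("=IB", 0x1000, 0x01)
+ #   assert CompressedImage._ORIG_SIZE_.unpack_from(Hdr)[0] == 0x1000
+ #   assert CompressedImage._CMPRS_TYPE_.unpack_from(Hdr)[0] == 0x01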
+
+## Ui() class
+#
+# A class for Ui
+#
+class Ui(Image):
+ _HEADER_ = struct.Struct("")
+ _HEADER_SIZE_ = 0
+
+ def __init__(self):
+ Image.__init__(self)
+
+ def __str__(self):
+ return self.String
+
+ def _Unpack(self):
+ # keep header in this Image object
+ self.empty()
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
+ return len(self)
+
+ def _GetUiString(self):
+ return codecs.utf_16_decode(self[0:-2].tobytes())[0]  # tostring() was removed in Python 3.9
+
+ String = property(_GetUiString)
+
+## Depex() class
+#
+# A class for Depex
+#
+class Depex(Image):
+ _HEADER_ = struct.Struct("")
+ _HEADER_SIZE_ = 0
+
+ _GUID_ = struct.Struct("1I2H8B")
+ _OPCODE_ = struct.Struct("1B")
+
+ _OPCODE_STRING_ = {
+ 0x00 : "BEFORE",
+ 0x01 : "AFTER",
+ 0x02 : "PUSH",
+ 0x03 : "AND",
+ 0x04 : "OR",
+ 0x05 : "NOT",
+ 0x06 : "TRUE",
+ 0x07 : "FALSE",
+ 0x08 : "END",
+ 0x09 : "SOR"
+ }
+
+ _NEXT_ = {
+ -1 : _OPCODE_, # the first token in a depex must be an opcode
+ 0x00 : _GUID_, #"BEFORE",
+ 0x01 : _GUID_, #"AFTER",
+ 0x02 : _GUID_, #"PUSH",
+ 0x03 : _OPCODE_, #"AND",
+ 0x04 : _OPCODE_, #"OR",
+ 0x05 : _OPCODE_, #"NOT",
+ 0x06 : _OPCODE_, #"TRUE",
+ 0x07 : _OPCODE_, #"FALSE",
+ 0x08 : None, #"END",
+ 0x09 : _OPCODE_, #"SOR"
+ }
+
+ def __init__(self):
+ Image.__init__(self)
+ self._ExprList = []
+
+ def __str__(self):
+ global gIndention
+ gIndention += 4
+ Indention = ' ' * gIndention
+ S = '\n'
+ for T in self.Expression:
+ if T in self._OPCODE_STRING_:
+ S += Indention + self._OPCODE_STRING_[T]
+ if T not in [0x00, 0x01, 0x02]:
+ S += '\n'
+ else:
+ S += ' ' + gGuidStringFormat % T + '\n'
+ gIndention -= 4
+ return S
+
+ def _Unpack(self):
+ # keep header in this Image object
+ self.empty()
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
+ return len(self)
+
+ def _GetExpression(self):
+ if self._ExprList == []:
+ Offset = 0
+ CurrentData = self._OPCODE_
+ while Offset < len(self):
+ Token = CurrentData.unpack_from(self, Offset)
+ Offset += CurrentData.size
+ if len(Token) == 1:
+ Token = Token[0]
+ if Token in self._NEXT_:
+ CurrentData = self._NEXT_[Token]
+ else:
+ CurrentData = self._GUID_
+ else:
+ CurrentData = self._OPCODE_
+ self._ExprList.append(Token)
+ if CurrentData is None:
+ break
+ return self._ExprList
+
+ Expression = property(_GetExpression)
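+
+ # Decoding sketch (illustrative): a stream of "PUSH <guid> END" yields
+ # [0x02, <11-element GUID tuple>, 0x08] from the state machine above:
+ #
+ #   Blob = struct.pack("1B", 0x02) + uuid.UUID(int=0).bytes_le + struct.pack("1B", 0x08)
+ #   Dpx = Depex()
+ #   Dpx.frombuffer(array('B', Blob), 0, len(Blob))
+ #   assert Dpx.Expression[0] == 0x02 and Dpx.Expression[-1] == 0x08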
+
+## FirmwareVolume() class
+#
+# A class for Firmware Volume
+#
+class FirmwareVolume(Image):
+ # Read FvLength, Attributes, HeaderLength, Checksum
+ _HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H")
+ _HEADER_SIZE_ = _HEADER_.size
+
+ _FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3"
+
+ _GUID_ = struct.Struct("16x 1I2H8B")
+ _LENGTH_ = struct.Struct("16x 16x 1Q")
+ _SIG_ = struct.Struct("16x 16x 8x 1I")
+ _ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
+ _HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
+ _CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")
+
+ def __init__(self, Name=''):
+ Image.__init__(self)
+ self.Name = Name
+ self.FfsDict = sdict()
+ self.OrderedFfsDict = sdict()
+ self.UnDispatchedFfsDict = sdict()
+ self.ProtocolList = sdict()
+
+ def CheckArchProtocol(self):
+ for Item in EotGlobalData.gArchProtocolGuids:
+ if Item.lower() not in EotGlobalData.gProtocolList:
+ return False
+ return True
+
+ def ParseDepex(self, Depex, Type):
+ List = None
+ if Type == 'Ppi':
+ List = EotGlobalData.gPpiList
+ if Type == 'Protocol':
+ List = EotGlobalData.gProtocolList
+ DepexStack = []
+ DepexList = []
+ DepexString = ''
+ FileDepex = None
+ CouldBeLoaded = True
+ for Index in range(0, len(Depex.Expression)):
+ Item = Depex.Expression[Index]
+ if Item == 0x00:
+ Index = Index + 1
+ Guid = gGuidStringFormat % Depex.Expression[Index]
+ if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
+ return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE'])
+ elif Item == 0x01:
+ Index = Index + 1
+ Guid = gGuidStringFormat % Depex.Expression[Index]
+ if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
+ return (True, 'AFTER %s' % Guid, [Guid, 'AFTER'])
+ elif Item == 0x02:
+ Index = Index + 1
+ Guid = gGuidStringFormat % Depex.Expression[Index]
+ if Guid.lower() in List:
+ DepexStack.append(True)
+ DepexList.append(Guid)
+ else:
+ DepexStack.append(False)
+ DepexList.append(Guid)
+ continue
+ elif Item == 0x03 or Item == 0x04:
+ DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
+ DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop()))
+ elif Item == 0x05:
+ DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
+ DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop()))
+ elif Item == 0x06:
+ DepexStack.append(True)
+ DepexList.append('TRUE')
+ DepexString = DepexString + 'TRUE' + ' '
+ elif Item == 0x07:
+ DepexStack.append(False)
+ DepexList.append('FALSE')
+ DepexString = DepexString + 'FALSE' + ' '
+ elif Item == 0x08:
+ if Index != len(Depex.Expression) - 1:
+ CouldBeLoaded = False
+ else:
+ CouldBeLoaded = DepexStack.pop()
+ else:
+ CouldBeLoaded = False
+ if DepexList != []:
+ DepexString = DepexList[0].strip()
+ return (CouldBeLoaded, DepexString, FileDepex)
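+
+ # Evaluation sketch (illustrative): the expression is postfix, so
+ # "PUSH A PUSH B AND END" is evaluated on a stack:
+ #
+ #   Stack = []
+ #   Stack.append(True)                          # PUSH A (A is installed)
+ #   Stack.append(False)                         # PUSH B (B is missing)
+ #   Stack.append(Stack.pop() and Stack.pop())   # AND
+ #   CouldBeLoaded = Stack.pop()                 # END -> False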
+
+ def Dispatch(self, Db=None):
+ if Db is None:
+ return False
+ self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
+ # Find PeiCore, DxeCore, PeiPriori, DxePriori first
+ FfsSecCoreGuid = None
+ FfsPeiCoreGuid = None
+ FfsDxeCoreGuid = None
+ FfsPeiPrioriGuid = None
+ FfsDxePrioriGuid = None
+ for FfsID in list(self.UnDispatchedFfsDict.keys()):
+ Ffs = self.UnDispatchedFfsDict[FfsID]
+ if Ffs.Type == 0x03:
+ FfsSecCoreGuid = FfsID
+ continue
+ if Ffs.Type == 0x04:
+ FfsPeiCoreGuid = FfsID
+ continue
+ if Ffs.Type == 0x05:
+ FfsDxeCoreGuid = FfsID
+ continue
+ if Ffs.Guid.lower() == PEI_APRIORI_GUID.lower():
+ FfsPeiPrioriGuid = FfsID
+ continue
+ if Ffs.Guid.lower() == DXE_APRIORI_GUID.lower():
+ FfsDxePrioriGuid = FfsID
+ continue
+
+ # Parse SEC_CORE first
+ if FfsSecCoreGuid is not None:
+ self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
+ self.LoadPpi(Db, FfsSecCoreGuid)
+
+ # Parse PEI first
+ if FfsPeiCoreGuid is not None:
+ self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
+ self.LoadPpi(Db, FfsPeiCoreGuid)
+ if FfsPeiPrioriGuid is not None:
+ # Load PEIMs listed in the PEI a priori file
+ FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
+ if len(FfsPeiPriori.Sections) == 1:
+ Section = FfsPeiPriori.Sections.popitem()[1]
+ if Section.Type == 0x19:
+ GuidStruct = struct.Struct('1I2H8B')
+ Start = 4
+ while len(Section) > Start:
+ Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
+ GuidString = gGuidStringFormat % Guid
+ Start = Start + 16
+ if GuidString in self.UnDispatchedFfsDict:
+ self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
+ self.LoadPpi(Db, GuidString)
+
+ self.DisPatchPei(Db)
+
+ # Parse DXE then
+ if FfsDxeCoreGuid is not None:
+ self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
+ self.LoadProtocol(Db, FfsDxeCoreGuid)
+ if FfsDxePrioriGuid is not None:
+ # Load DXE drivers listed in the DXE a priori file
+ FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
+ if len(FfsDxePriori.Sections) == 1:
+ Section = FfsDxePriori.Sections.popitem()[1]
+ if Section.Type == 0x19:
+ GuidStruct = struct.Struct('1I2H8B')
+ Start = 4
+ while len(Section) > Start:
+ Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
+ GuidString = gGuidStringFormat % Guid
+ Start = Start + 16
+ if GuidString in self.UnDispatchedFfsDict:
+ self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
+ self.LoadProtocol(Db, GuidString)
+
+ self.DisPatchDxe(Db)
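+
+ # A-priori parsing sketch (illustrative): both branches above walk a RAW
+ # section as a packed array of 16-byte GUIDs, skipping the 4-byte section
+ # header:
+ #
+ #   GuidStruct = struct.Struct('1I2H8B')
+ #   Start = 4
+ #   while len(Section) > Start:
+ #       GuidString = gGuidStringFormat % GuidStruct.unpack_from(Section[Start : Start + 16])
+ #       Start += 16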
+
+ def LoadProtocol(self, Db, ModuleGuid):
+ SqlCommand = """select GuidValue from Report
+ where SourceFileFullPath in
+ (select Value1 from Inf where BelongsToFile =
+ (select BelongsToFile from Inf
+ where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
+ and Model = %s)
+ and ItemType = 'Protocol' and ItemMode = 'Produced'""" \
+ % (ModuleGuid, 5001, 3007)
+ RecordSet = Db.TblReport.Exec(SqlCommand)
+ for Record in RecordSet:
+ SqlCommand = """select Value2 from Inf where BelongsToFile =
+ (select DISTINCT BelongsToFile from Inf
+ where Value1 =
+ (select SourceFileFullPath from Report
+ where GuidValue like '%s' and ItemMode = 'Callback'))
+ and Value1 = 'FILE_GUID'""" % Record[0]
+ # Both branches recorded the same value, so the callback query result
+ # does not change the action; record the producer module unconditionally
+ Db.TblReport.Exec(SqlCommand)
+ EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
+
+ def LoadPpi(self, Db, ModuleGuid):
+ SqlCommand = """select GuidValue from Report
+ where SourceFileFullPath in
+ (select Value1 from Inf where BelongsToFile =
+ (select BelongsToFile from Inf
+ where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
+ and Model = %s)
+ and ItemType = 'Ppi' and ItemMode = 'Produced'""" \
+ % (ModuleGuid, 5001, 3007)
+ RecordSet = Db.TblReport.Exec(SqlCommand)
+ for Record in RecordSet:
+ EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid
+
+ def DisPatchDxe(self, Db):
+ IsInstalled = False
+ ScheduleList = sdict()
+ for FfsID in list(self.UnDispatchedFfsDict.keys()):
+ CouldBeLoaded = False
+ DepexString = ''
+ FileDepex = None
+ Ffs = self.UnDispatchedFfsDict[FfsID]
+ if Ffs.Type == 0x07:
+ # Get Depex
+ IsFoundDepex = False
+ for Section in Ffs.Sections.values():
+ # Find Depex
+ if Section.Type == 0x13:
+ IsFoundDepex = True
+ CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol')
+ break
+ if Section.Type == 0x01:
+ CompressSections = Section._SubImages[4]
+ for CompressSection in CompressSections.Sections:
+ if CompressSection.Type == 0x13:
+ IsFoundDepex = True
+ CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol')
+ break
+ if CompressSection.Type == 0x02:
+ NewSections = CompressSection._SubImages[4]
+ for NewSection in NewSections.Sections:
+ if NewSection.Type == 0x13:
+ IsFoundDepex = True
+ CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Protocol')
+ break
+
+ # No depex section was found
+ if not IsFoundDepex:
+ CouldBeLoaded = self.CheckArchProtocol()
+ DepexString = ''
+ FileDepex = None
+
+ # Append New Ffs
+ if CouldBeLoaded:
+ IsInstalled = True
+ NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
+ NewFfs.Depex = DepexString
+ if FileDepex is not None:
+ # NOTE: OrderedDict has no insert(); this path assumes the legacy sdict API
+ ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
+ else:
+ ScheduleList[FfsID] = NewFfs
+ else:
+ self.UnDispatchedFfsDict[FfsID].Depex = DepexString
+
+ for FfsID in list(ScheduleList.keys()):  # copy the keys; the dict is mutated below
+ NewFfs = ScheduleList.pop(FfsID)
+ FfsName = 'Unknown'
+ self.OrderedFfsDict[FfsID] = NewFfs
+ self.LoadProtocol(Db, FfsID)
+
+ SqlCommand = """select Value2 from Inf
+ where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
+ and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001)
+ RecordSet = Db.TblReport.Exec(SqlCommand)
+ if RecordSet != []:
+ FfsName = RecordSet[0][0]
+
+ if IsInstalled:
+ self.DisPatchDxe(Db)
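+
+ # Dispatch sketch (illustrative): the recursion above is a fixed-point
+ # iteration; an equivalent loop form, with a hypothetical single-pass
+ # helper, would be:
+ #
+ #   Installed = True
+ #   while Installed:
+ #       Installed = DispatchOnePass()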
+
+ def DisPatchPei(self, Db):
+ IsInstalled = False
+ for FfsID in list(self.UnDispatchedFfsDict.keys()):
+ CouldBeLoaded = True
+ DepexString = ''
+ FileDepex = None
+ Ffs = self.UnDispatchedFfsDict[FfsID]
+ if Ffs.Type == 0x06 or Ffs.Type == 0x08:
+ # Get Depex
+ for Section in Ffs.Sections.values():
+ if Section.Type == 0x1B:
+ CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
+ break
+ if Section.Type == 0x01:
+ CompressSections = Section._SubImages[4]
+ for CompressSection in CompressSections.Sections:
+ if CompressSection.Type == 0x1B:
+ CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi')
+ break
+ if CompressSection.Type == 0x02:
+ NewSections = CompressSection._SubImages[4]
+ for NewSection in NewSections.Sections:
+ if NewSection.Type == 0x1B:
+ CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi')
+ break
+
+ # Append New Ffs
+ if CouldBeLoaded:
+ IsInstalled = True
+ NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
+ NewFfs.Depex = DepexString
+ self.OrderedFfsDict[FfsID] = NewFfs
+ self.LoadPpi(Db, FfsID)
+ else:
+ self.UnDispatchedFfsDict[FfsID].Depex = DepexString
+
+ if IsInstalled:
+ self.DisPatchPei(Db)
+
+
+ def __str__(self):
+ global gIndention
+ gIndention += 4
+ FvInfo = '\n' + ' ' * gIndention
+ FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
+ FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict])
+ gIndention -= 4
+ return FvInfo + FfsInfo
+
+ def _Unpack(self):
+ Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
+ self.empty()
+ self.extend(self._BUF_[self._OFF_:self._OFF_ + Size])
+
+ # traverse the FFS
+ EndOfFv = Size
+ FfsStartAddress = self.HeaderSize
+ LastFfsObj = None
+ while FfsStartAddress < EndOfFv:
+ FfsObj = Ffs()
+ FfsObj.frombuffer(self, FfsStartAddress)
+ FfsId = repr(FfsObj)
+ if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
+ or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
+ if LastFfsObj is not None:
+ LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
+ else:
+ if FfsId in self.FfsDict:
+ EdkLogger.error("FV", 0, "Duplicate GUID in FFS",
+ ExtraData="\t%s @ %s\n\t%s @ %s" \
+ % (FfsObj.Guid, FfsObj.Offset,
+ self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
+ self.FfsDict[FfsId] = FfsObj
+ if LastFfsObj is not None:
+ LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)
+
+ FfsStartAddress += len(FfsObj)
+ #
+ # align to next 8-byte aligned address: A = (A + 8 - 1) & (~(8 - 1))
+ # The next FFS must be at the latest next 8-byte aligned address
+ #
+ FfsStartAddress = (FfsStartAddress + 7) & (~7)
+ LastFfsObj = FfsObj
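+
+ # Alignment sketch: (A + 7) & ~7 rounds A up to the next 8-byte boundary:
+ #
+ #   assert ((0x11 + 7) & (~7)) == 0x18
+ #   assert ((0x18 + 7) & (~7)) == 0x18   # aligned values are unchanged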
+
+ def _GetAttributes(self):
+ return self.GetField(self._ATTR_, 0)[0]
+
+ def _GetSize(self):
+ return self.GetField(self._LENGTH_, 0)[0]
+
+ def _GetChecksum(self):
+ return self.GetField(self._CHECKSUM_, 0)[0]
+
+ def _GetHeaderLength(self):
+ return self.GetField(self._HLEN_, 0)[0]
+
+ def _GetFileSystemGuid(self):
+ return gGuidStringFormat % self.GetField(self._GUID_, 0)
+
+ Attributes = property(_GetAttributes)
+ Size = property(_GetSize)
+ Checksum = property(_GetChecksum)
+ HeaderSize = property(_GetHeaderLength)
+ FileSystemGuid = property(_GetFileSystemGuid)
+
+## GuidDefinedImage() class
+#
+# A class for GUID Defined Image
+#
+class GuidDefinedImage(Image):
+ _HEADER_ = struct.Struct("1I2H8B 1H 1H")
+ _HEADER_SIZE_ = _HEADER_.size
+
+ _GUID_ = struct.Struct("1I2H8B")
+ _DATA_OFFSET_ = struct.Struct("16x 1H")
+ _ATTR_ = struct.Struct("18x 1H")
+
+ CRC32_GUID = "FC1BCDB0-7D31-49AA-936A-A4600D9DD083"
+ TIANO_COMPRESS_GUID = 'A31280AD-481E-41B6-95E8-127F4C984779'
+ LZMA_COMPRESS_GUID = 'EE4E5898-3914-4259-9D6E-DC7BD79403CF'
+
+ def __init__(self, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):
+ Image.__init__(self)
+ if SectionDefinitionGuid is not None:
+ self.SectionDefinitionGuid = SectionDefinitionGuid
+ if DataOffset is not None:
+ self.DataOffset = DataOffset
+ if Attributes is not None:
+ self.Attributes = Attributes
+ if Data is not None:
+ self.Data = Data
+
+ def __str__(self):
+ S = "guid=%s" % (gGuidStringFormat % self.SectionDefinitionGuid)
+ for Sec in self.Sections:
+ S += "\n" + str(Sec)
+ return S
+
+ def _Unpack(self):
+ # keep header in this Image object
+ self.empty()
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
+ return len(self)
+
+ def _SetAttribute(self, Attribute):
+ self.SetField(self._ATTR_, 0, Attribute)
+
+ def _GetAttribute(self):
+ return self.GetField(self._ATTR_)[0]
+
+ def _SetGuid(self, Guid):
+ self.SetField(self._GUID_, 0, Guid)
+
+ def _GetGuid(self):
+ return self.GetField(self._GUID_)
+
+ def _SetDataOffset(self, Offset):
+ self.SetField(self._DATA_OFFSET_, 0, Offset)
+
+ def _GetDataOffset(self):
+ return self.GetField(self._DATA_OFFSET_)[0]
+
+ def _GetSections(self):
+ SectionList = []
+ Guid = gGuidStringFormat % self.SectionDefinitionGuid
+ if Guid == self.CRC32_GUID:
+ # skip the CRC32 value, we don't do CRC32 verification here
+ Offset = self.DataOffset - 4
+ while Offset < len(self):
+ Sec = Section()
+ try:
+ Sec.frombuffer(self, Offset)
+ Offset += Sec.Size
+ # the section is aligned to 4-byte boundary
+ Offset = (Offset + 3) & (~3)
+ except:
+ break
+ SectionList.append(Sec)
+ elif Guid == self.TIANO_COMPRESS_GUID:
+ try:
+ # skip the header
+ Offset = self.DataOffset - 4
+ TmpData = DeCompress('Framework', self[Offset:])
+ DecData = array('B')
+ DecData.frombytes(TmpData)
+ Offset = 0
+ while Offset < len(DecData):
+ Sec = Section()
+ try:
+ Sec.frombuffer(DecData, Offset)
+ Offset += Sec.Size
+ # the section is aligned to 4-byte boundary
+ Offset = (Offset + 3) & (~3)
+ except:
+ break
+ SectionList.append(Sec)
+ except:
+ pass
+ elif Guid == self.LZMA_COMPRESS_GUID:
+ try:
+ # skip the header
+ Offset = self.DataOffset - 4
+
+ TmpData = DeCompress('Lzma', self[Offset:])
+ DecData = array('B')
+ DecData.frombytes(TmpData)
+ Offset = 0
+ while Offset < len(DecData):
+ Sec = Section()
+ try:
+ Sec.frombuffer(DecData, Offset)
+ Offset += Sec.Size
+ # the section is aligned to 4-byte boundary
+ Offset = (Offset + 3) & (~3)
+ except:
+ break
+ SectionList.append(Sec)
+ except:
+ pass
+
+ return SectionList
+
+ Attributes = property(_GetAttribute, _SetAttribute)
+ SectionDefinitionGuid = property(_GetGuid, _SetGuid)
+ DataOffset = property(_GetDataOffset, _SetDataOffset)
+ Sections = property(_GetSections)
+
+## Section() class
+#
+# A class for Section
+#
+class Section(Image):
+ _TypeName = {
+ 0x00 : "<unknown>",
+ 0x01 : "COMPRESSION",
+ 0x02 : "GUID_DEFINED",
+ 0x10 : "PE32",
+ 0x11 : "PIC",
+ 0x12 : "TE",
+ 0x13 : "DXE_DEPEX",
+ 0x14 : "VERSION",
+ 0x15 : "USER_INTERFACE",
+ 0x16 : "COMPATIBILITY16",
+ 0x17 : "FIRMWARE_VOLUME_IMAGE",
+ 0x18 : "FREEFORM_SUBTYPE_GUID",
+ 0x19 : "RAW",
+ 0x1B : "PEI_DEPEX"
+ }
+
+ _SectionSubImages = {
+ 0x01 : CompressedImage,
+ 0x02 : GuidDefinedImage,
+ 0x17 : FirmwareVolume,
+ 0x13 : Depex,
+ 0x1B : Depex,
+ 0x15 : Ui
+ }
+
+ # Size = 3-byte
+ # Type = 1-byte
+ _HEADER_ = struct.Struct("3B 1B")
+ _HEADER_SIZE_ = _HEADER_.size
+
+ # SubTypeGuid
+ # _FREE_FORM_SUBTYPE_GUID_HEADER_ = struct.Struct("1I2H8B")
+ _SIZE_ = struct.Struct("3B")
+ _TYPE_ = struct.Struct("3x 1B")
+
+ def __init__(self, Type=None, Size=None):
+ Image.__init__(self)
+ self._Alignment = 1
+ if Type is not None:
+ self.Type = Type
+ if Size is not None:
+ self.Size = Size
+
+ def __str__(self):
+ global gIndention
+ gIndention += 4
+ SectionInfo = ' ' * gIndention
+ if self.Type in self._TypeName:
+ SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size)
+ else:
+ SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size)
+ for Offset in self._SubImages.keys():
+ SectionInfo += ", " + str(self._SubImages[Offset])
+ gIndention -= 4
+ return SectionInfo
+
+ def _Unpack(self):
+ self.empty()
+ Type, = self._TYPE_.unpack_from(self._BUF_, self._OFF_)
+ Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_)
+ Size = Size1 + (Size2 << 8) + (Size3 << 16)
+
+ if Type not in self._SectionSubImages:
+ # no need to extract sub-image, keep all in this Image object
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size])
+ else:
+ # keep header in this Image object
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._HEADER_SIZE_])
+ #
+ # use new Image object to represent payload, which may be another kind
+ # of image such as PE32
+ #
+ PayloadOffset = self._HEADER_SIZE_
+ PayloadLen = self.Size - self._HEADER_SIZE_
+ Payload = self._SectionSubImages[self.Type]()
+ Payload.frombuffer(self._BUF_, self._OFF_ + self._HEADER_SIZE_, PayloadLen)
+ self._SubImages[PayloadOffset] = Payload
+
+ return Size
+
+ def _SetSize(self, Size):
+ Size1 = Size & 0xFF
+ Size2 = (Size & 0xFF00) >> 8
+ Size3 = (Size & 0xFF0000) >> 16
+ self.SetField(self._SIZE_, 0, Size1, Size2, Size3)
+
+ def _GetSize(self):
+ Size1, Size2, Size3 = self.GetField(self._SIZE_)
+ return Size1 + (Size2 << 8) + (Size3 << 16)
+
+ def _SetType(self, Type):
+ self.SetField(self._TYPE_, 0, Type)
+
+ def _GetType(self):
+ return self.GetField(self._TYPE_)[0]
+
+ def _GetAlignment(self):
+ return self._Alignment
+
+ def _SetAlignment(self, Alignment):
+ self._Alignment = Alignment
+ AlignmentMask = Alignment - 1
+ # section alignment is actually for payload, so we need to add header size
+ PayloadOffset = self._OFF_ + self._HEADER_SIZE_
+ if (PayloadOffset & (~AlignmentMask)) == 0:
+ return
+ # NOTE: NewOffset is computed but never applied; the setter only records
+ # the requested alignment value
+ NewOffset = (PayloadOffset + AlignmentMask) & (~AlignmentMask)
+ while (NewOffset - PayloadOffset) < self._HEADER_SIZE_:
+ NewOffset += self._Alignment
+
+ def tofile(self, f):
+ self.Size = len(self)
+ Image.tofile(self, f)
+ for Offset in self._SubImages:
+ self._SubImages[Offset].tofile(f)
+
+ Type = property(_GetType, _SetType)
+ Size = property(_GetSize, _SetSize)
+ Alignment = property(_GetAlignment, _SetAlignment)
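+
+ # Size sketch: the 3-byte section size is a little-endian 24-bit integer,
+ # so encoding and decoding round-trip:
+ #
+ #   Size = 0x012345
+ #   Size1, Size2, Size3 = Size & 0xFF, (Size >> 8) & 0xFF, (Size >> 16) & 0xFF
+ #   assert Size1 + (Size2 << 8) + (Size3 << 16) == Size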
+
+## Ffs() class
+#
+# A class for Ffs Section
+#
+class Ffs(Image):
+ _FfsFormat = "24B%(payload_size)sB"
+ # skip IntegrityCheck
+ _HEADER_ = struct.Struct("1I2H8B 2x 1B 1B 3B 1B")
+ _HEADER_SIZE_ = _HEADER_.size
+
+ _NAME_ = struct.Struct("1I2H8B")
+ _INT_CHECK_ = struct.Struct("16x 1H")
+ _TYPE_ = struct.Struct("18x 1B")
+ _ATTR_ = struct.Struct("19x 1B")
+ _SIZE_ = struct.Struct("20x 3B")
+ _STATE_ = struct.Struct("23x 1B")
+
+ FFS_ATTRIB_FIXED = 0x04
+ FFS_ATTRIB_DATA_ALIGNMENT = 0x38
+ FFS_ATTRIB_CHECKSUM = 0x40
+
+ _TypeName = {
+ 0x00 : "<unknown>",
+ 0x01 : "RAW",
+ 0x02 : "FREEFORM",
+ 0x03 : "SECURITY_CORE",
+ 0x04 : "PEI_CORE",
+ 0x05 : "DXE_CORE",
+ 0x06 : "PEIM",
+ 0x07 : "DRIVER",
+ 0x08 : "COMBINED_PEIM_DRIVER",
+ 0x09 : "APPLICATION",
+ 0x0A : "SMM",
+ 0x0B : "FIRMWARE_VOLUME_IMAGE",
+ 0x0C : "COMBINED_SMM_DXE",
+ 0x0D : "SMM_CORE",
+ 0x0E : "MM_STANDALONE",
+ 0x0F : "MM_CORE_STANDALONE",
+ 0xc0 : "OEM_MIN",
+ 0xdf : "OEM_MAX",
+ 0xe0 : "DEBUG_MIN",
+ 0xef : "DEBUG_MAX",
+ 0xf0 : "FFS_MIN",
+ 0xff : "FFS_MAX",
+ 0xf0 : "FFS_PAD",
+ }
+
+ def __init__(self):
+ Image.__init__(self)
+ self.FreeSpace = 0
+
+ self.Sections = sdict()
+ self.Depex = ''
+
+ self.__ID__ = None
+
+ def __str__(self):
+ global gIndention
+ gIndention += 4
+ Indention = ' ' * gIndention
+ FfsInfo = Indention
+ FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \
+ (Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment)
+ SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()])
+ gIndention -= 4
+ return FfsInfo + SectionInfo + "\n"
+
+ def __len__(self):
+ return self.Size
+
+ def __repr__(self):
+ return self.__ID__
+
+ def _Unpack(self):
+ Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_)
+ Size = Size1 + (Size2 << 8) + (Size3 << 16)
+ self.empty()
+ self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size])
+
+ # Pad FFS files may share the same GUID, so use a unique ID to avoid collisions
+ if self.Type == 0xf0:
+ self.__ID__ = str(uuid.uuid1()).upper()
+ else:
+ self.__ID__ = self.Guid
+
+ # Traverse the SECTION. RAW and PAD do not have sections
+ if self.Type not in [0xf0, 0x01] and Size > 0 and Size < 0xFFFFFF:
+ EndOfFfs = Size
+ SectionStartAddress = self._HEADER_SIZE_
+ while SectionStartAddress < EndOfFfs:
+ SectionObj = Section()
+ SectionObj.frombuffer(self, SectionStartAddress)
+ #f = open(repr(SectionObj), 'wb')
+ #SectionObj.Size = 0
+ #SectionObj.tofile(f)
+ #f.close()
+ self.Sections[SectionStartAddress] = SectionObj
+ SectionStartAddress += len(SectionObj)
+ SectionStartAddress = (SectionStartAddress + 3) & (~3)
+
+ def Pack(self):
+ pass
+
+ def SetFreeSpace(self, Size):
+ self.FreeSpace = Size
+
+ def _GetGuid(self):
+ return gGuidStringFormat % self.Name
+
+ def _SetName(self, Value):
+ # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11
+ self.SetField(self._NAME_, 0, Value)
+
+ def _GetName(self):
+ # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11
+ return self.GetField(self._NAME_)
+
+ def _SetSize(self, Size):
+ Size1 = Size & 0xFF
+ Size2 = (Size & 0xFF00) >> 8
+ Size3 = (Size & 0xFF0000) >> 16
+ self.SetField(self._SIZE_, 0, Size1, Size2, Size3)
+
+ def _GetSize(self):
+ Size1, Size2, Size3 = self.GetField(self._SIZE_)
+ return Size1 + (Size2 << 8) + (Size3 << 16)
+
+ def _SetType(self, Type):
+ self.SetField(self._TYPE_, 0, Type)
+
+ def _GetType(self):
+ return self.GetField(self._TYPE_)[0]
+
+ def _SetAttributes(self, Value):
+ self.SetField(self._ATTR_, 0, Value)
+
+ def _GetAttributes(self):
+ return self.GetField(self._ATTR_)[0]
+
+ def _GetFixed(self):
+ if (self.Attributes & self.FFS_ATTRIB_FIXED) != 0:
+ return True
+ return False
+
+ def _GetCheckSum(self):
+ if (self.Attributes & self.FFS_ATTRIB_CHECKSUM) != 0:
+ return True
+ return False
+
+ def _GetAlignment(self):
+ return (self.Attributes & self.FFS_ATTRIB_DATA_ALIGNMENT) >> 3
+
+ def _SetState(self, Value):
+ self.SetField(self._STATE_, 0, Value)
+
+ def _GetState(self):
+ return self.GetField(self._STATE_)[0]
+
+ Name = property(_GetName, _SetName)
+ Guid = property(_GetGuid)
+ Type = property(_GetType, _SetType)
+ Size = property(_GetSize, _SetSize)
+ Attributes = property(_GetAttributes, _SetAttributes)
+ Fixed = property(_GetFixed)
+ Checksum = property(_GetCheckSum)
+ Alignment = property(_GetAlignment)
+ State = property(_GetState, _SetState)
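+
+ # Attribute sketch: FFS_ATTRIB_DATA_ALIGNMENT (0x38) occupies bits 3-5 of
+ # the attribute byte, so the raw alignment index decodes as:
+ #
+ #   assert (0x38 & Ffs.FFS_ATTRIB_DATA_ALIGNMENT) >> 3 == 7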
+
+
+## MultipleFv() class
+#
+# A class for Multiple FV
+#
+class MultipleFv(FirmwareVolume):
+ def __init__(self, FvList):
+ FirmwareVolume.__init__(self)
+ self.BasicInfo = []
+ for FvPath in FvList:
+ Fd = None
+ FvName = os.path.splitext(os.path.split(FvPath)[1])[0]
+ if FvPath.strip():
+ Fd = open(FvPath, 'rb')
+ Buf = array('B')
+ try:
+ Buf.fromfile(Fd, os.path.getsize(FvPath))
+ except EOFError:
+ pass
+
+ Fv = FirmwareVolume(FvName)
+ Fv.frombuffer(Buf, 0, len(Buf))
+
+ self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size])
+ self.FfsDict.update(Fv.FfsDict)
+
+## Class Eot
+#
+# This class defines the main entry point of Eot
+#
+# @param object: Inherited from object class
+#
+class Eot(object):
+ ## The constructor
+ #
+ # @param self: The object pointer
+ #
+ def __init__(self, CommandLineOption=True, IsInit=True, SourceFileList=None, \
+ IncludeDirList=None, DecFileList=None, GuidList=None, LogFile=None,
+ FvFileList="", MapFileList="", Report='Report.html', Dispatch=None):
+ # Version and Copyright
+ self.VersionNumber = ("0.02" + " " + gBUILD_VERSION)
+ self.Version = "%prog Version " + self.VersionNumber
+ self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved."
+ self.Report = Report
+
+ self.IsInit = IsInit
+ self.SourceFileList = SourceFileList
+ self.IncludeDirList = IncludeDirList
+ self.DecFileList = DecFileList
+ self.GuidList = GuidList
+ self.LogFile = LogFile
+ self.FvFileList = FvFileList
+ self.MapFileList = MapFileList
+ self.Dispatch = Dispatch
+
+ # Check workspace environment
+ if "EFI_SOURCE" in os.environ:
+ EotGlobalData.gEFI_SOURCE = os.path.normpath(os.getenv("EFI_SOURCE"))
+ EotGlobalData.gEDK_SOURCE = os.path.join(EotGlobalData.gEFI_SOURCE, 'Edk')
+ elif "EDK_SOURCE" in os.environ:
+ EotGlobalData.gEDK_SOURCE = os.path.normpath(os.getenv("EDK_SOURCE"))
+
+ if "WORKSPACE" not in os.environ:
+ EdkLogger.error("EOT", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
+ ExtraData="WORKSPACE")
+ else:
+ EotGlobalData.gWORKSPACE = os.path.normpath(os.getenv("WORKSPACE"))
+
+ EotGlobalData.gMACRO['WORKSPACE'] = EotGlobalData.gWORKSPACE
+ EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gEFI_SOURCE
+ EotGlobalData.gMACRO['EDK_SOURCE'] = EotGlobalData.gEDK_SOURCE
+
+ # Parse the options and args
+ if CommandLineOption:
+ self.ParseOption()
+
+ if self.FvFileList:
+ for FvFile in GetSplitValueList(self.FvFileList, ' '):
+ FvFile = os.path.normpath(FvFile)
+ if not os.path.isfile(FvFile):
+ EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % FvFile)
+ EotGlobalData.gFV_FILE.append(FvFile)
+ else:
+ EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "The fv file list of target platform was not specified")
+
+ if self.MapFileList:
+ for MapFile in GetSplitValueList(self.MapFileList, ' '):
+ MapFile = os.path.normpath(MapFile)
+ if not os.path.isfile(MapFile):
+ EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile)
+ EotGlobalData.gMAP_FILE.append(MapFile)
+
+ # Generate source file list
+ self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList)
+
+ # Generate a GUID list from the DEC file list
+ self.ParseDecFile(self.DecFileList)
+
+ # Generate guid list from GUID list file
+ self.ParseGuidList(self.GuidList)
+
+ # Init Eot database
+ EotGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
+ EotGlobalData.gDb.InitDatabase(self.IsInit)
+
+ # Build ECC database
+ self.BuildDatabase()
+
+ # Parse Ppi/Protocol
+ self.ParseExecutionOrder()
+
+ # Merge Identifier tables
+ self.GenerateQueryTable()
+
+ # Generate report database
+ self.GenerateReportDatabase()
+
+ # Load Fv Info
+ self.LoadFvInfo()
+
+ # Load Map Info
+ self.LoadMapInfo()
+
+ # Generate Report
+ self.GenerateReport()
+
+ # Convert log file
+ self.ConvertLogFile(self.LogFile)
+
+ # DONE
+ EdkLogger.quiet("EOT FINISHED!")
+
+ # Close Database
+ EotGlobalData.gDb.Close()
+
+ ## ParseDecFile() method
+ #
+ # Parse the DEC file list and collect all GUID names with GUID values as {GuidName : GuidValue}
+ # The Dict is stored in EotGlobalData.gGuidDict
+ #
+ # @param self: The object pointer
+ # @param DecFileList: A list of all DEC files
+ #
+ def ParseDecFile(self, DecFileList):
+ if DecFileList:
+ path = os.path.normpath(DecFileList)
+ lfr = open(path, 'r')
+ for line in lfr:
+ path = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
+ if os.path.exists(path):
+ dfr = open(path, 'r')
+ for line in dfr:
+ line = CleanString(line)
+ list = line.split('=')
+ if len(list) == 2:
+ EotGlobalData.gGuidDict[list[0].strip()] = GuidStructureStringToGuidString(list[1].strip())
+
+
+ ## ParseGuidList() method
+ #
+ # Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue}
+ # The Dict is stored in EotGlobalData.gGuidDict
+ #
+ # @param self: The object pointer
+ # @param GuidList: A list of all GUID and its value
+ #
+ def ParseGuidList(self, GuidList):
+ Path = os.path.join(EotGlobalData.gWORKSPACE, GuidList)
+ if os.path.isfile(Path):
+ for Line in open(Path):
+ if Line.strip():
+ (GuidName, GuidValue) = Line.split()
+ EotGlobalData.gGuidDict[GuidName] = GuidValue
+
+ ## ConvertLogFile() method
+ #
+ # Parse a real running log file to get real dispatch order
+ # The result is saved to old file name + '.new'
+ #
+ # @param self: The object pointer
+ # @param LogFile: A real running log file name
+ #
+ def ConvertLogFile(self, LogFile):
+ newline = []
+ lfr = None
+ lfw = None
+ if LogFile:
+ # open in text mode; the lines are manipulated as str below
+ lfr = open(LogFile, 'r')
+ lfw = open(LogFile + '.new', 'w')
+ for line in lfr:
+ line = line.strip()
+ line = line.replace('.efi', '')
+ index = line.find("Loading PEIM at ")
+ if index > -1:
+ newline.append(line[index + 55 : ])
+ continue
+ index = line.find("Loading driver at ")
+ if index > -1:
+ newline.append(line[index + 57 : ])
+ continue
+
+ for line in newline:
+ lfw.write(line + '\n')
+
+ if lfr:
+ lfr.close()
+ if lfw:
+ lfw.close()
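+
+ # Extraction sketch (illustrative): the fixed offsets above (55 and 57)
+ # assume one specific log layout; a layout-independent equivalent could use
+ # a regular expression such as:
+ #
+ #   import re
+ #   Match = re.search(r'Loading (?:PEIM|driver) at .* (\S+)$', line)
+ #   if Match:
+ #       newline.append(Match.group(1))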
+
+ ## GenerateSourceFileList() method
+ #
+ # Generate a list of all source files
+ # 1. Search the file list one by one
+ # 2. Store inf file name with source file names under it like
+ # { INF file name: [source file1, source file2, ...]}
+ # 3. Search the include list to find all .h files
+ # 4. Store source file list to EotGlobalData.gSOURCE_FILES
+ # 5. Store INF file list to EotGlobalData.gINF_FILES
+ #
+ # @param self: The object pointer
+ # @param SourceFileList: A list of all source files
+ # @param IncludeFileList: A list of all include files
+ #
+ def GenerateSourceFileList(self, SourceFileList, IncludeFileList):
+ EdkLogger.quiet("Generating source files list ... ")
+ mSourceFileList = []
+ mInfFileList = []
+ mDecFileList = []
+ mFileList = {}
+ mCurrentInfFile = ''
+ mCurrentSourceFileList = []
+
+ if SourceFileList:
+ sfl = open(SourceFileList, 'r')
+ for line in sfl:
+ line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
+ if line[-2:].upper() == '.C' or line[-2:].upper() == '.H':
+ if line not in mCurrentSourceFileList:
+ mCurrentSourceFileList.append(line)
+ mSourceFileList.append(line)
+ EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % line)
+ if line[-4:].upper() == '.INF':
+ if mCurrentInfFile != '':
+ mFileList[mCurrentInfFile] = mCurrentSourceFileList
+ mCurrentSourceFileList = []
+ mCurrentInfFile = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line))
+ EotGlobalData.gOP_INF.write('%s\n' % mCurrentInfFile)
+ if mCurrentInfFile not in mFileList:
+ mFileList[mCurrentInfFile] = mCurrentSourceFileList
+
+ # Get all include files from packages
+ if IncludeFileList:
+ ifl = open(IncludeFileList, 'r')
+ for line in ifl:
+ if not line.strip():
+ continue
+ newline = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
+ for Root, Dirs, Files in os.walk(str(newline)):
+ for File in Files:
+ FullPath = os.path.normpath(os.path.join(Root, File))
+ if FullPath not in mSourceFileList and File[-2:].upper() == '.H':
+ mSourceFileList.append(FullPath)
+ EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % FullPath)
+ if FullPath not in mDecFileList and File.upper().find('.DEC') > -1:
+ mDecFileList.append(FullPath)
+
+ EotGlobalData.gSOURCE_FILES = mSourceFileList
+ EotGlobalData.gOP_SOURCE_FILES.close()
+
+ EotGlobalData.gINF_FILES = mFileList
+ EotGlobalData.gOP_INF.close()
+
+ ## GenerateReport() method
+ #
+ # Generate final HTML report
+ #
+ # @param self: The object pointer
+ #
+ def GenerateReport(self):
+ EdkLogger.quiet("Generating report file ... ")
+ Rep = Report(self.Report, EotGlobalData.gFV, self.Dispatch)
+ Rep.GenerateReport()
+
+ ## LoadMapInfo() method
+ #
+ # Load map files and parse them
+ #
+ # @param self: The object pointer
+ #
+ def LoadMapInfo(self):
+ if EotGlobalData.gMAP_FILE != []:
+ EdkLogger.quiet("Parsing Map file ... ")
+ EotGlobalData.gMap = ParseMapFile(EotGlobalData.gMAP_FILE)
+
+ ## LoadFvInfo() method
+ #
+ # Load FV binary files and parse them
+ #
+ # @param self: The object pointer
+ #
+ def LoadFvInfo(self):
+ EdkLogger.quiet("Parsing FV file ... ")
+ EotGlobalData.gFV = MultipleFv(EotGlobalData.gFV_FILE)
+ EotGlobalData.gFV.Dispatch(EotGlobalData.gDb)
+
+ for Protocol in EotGlobalData.gProtocolList:
+ EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s\n' %Protocol)
+
+ ## GenerateReportDatabase() method
+ #
+ # Generate data for the information needed by report
+ # 1. Update name, macro and value of all found PPI/PROTOCOL GUID
+ # 2. Install hard coded PPI/PROTOCOL
+ #
+ # @param self: The object pointer
+ #
+ def GenerateReportDatabase(self):
+ EdkLogger.quiet("Generating the cross-reference table of GUID for Ppi/Protocol ... ")
+
+ # Update Protocol/Ppi Guid
+ SqlCommand = """select DISTINCT GuidName from Report"""
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ for Record in RecordSet:
+ GuidName = Record[0]
+ GuidMacro = ''
+ GuidMacro2 = ''
+ GuidValue = ''
+
+ # Find guid value defined in Dec file
+ if GuidName in EotGlobalData.gGuidDict:
+ GuidValue = EotGlobalData.gGuidDict[GuidName]
+ SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName)
+ EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ continue
+
+ # Search defined Macros for guid name
+ SqlCommand ="""select DISTINCT Value, Modifier from Query where Name like '%s'""" % GuidName
+ GuidMacroSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ # Ignore NULL result
+ if not GuidMacroSet:
+ continue
+ GuidMacro = GuidMacroSet[0][0].strip()
+ if not GuidMacro:
+ continue
+ # Find Guid value of Guid Macro
+ SqlCommand ="""select DISTINCT Value from Query2 where Value like '%%%s%%' and Model = %s""" % (GuidMacro, MODEL_IDENTIFIER_MACRO_DEFINE)
+ GuidValueSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ if GuidValueSet != []:
+ GuidValue = GuidValueSet[0][0]
+ GuidValue = GuidValue[GuidValue.find(GuidMacro) + len(GuidMacro) :]
+ GuidValue = GuidValue.lower().replace('\\', '').replace('\r', '').replace('\n', '').replace('l', '').strip()
+ GuidValue = GuidStructureStringToGuidString(GuidValue)
+ SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName)
+ EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ continue
+
+ # Update Hard Coded Ppi/Protocol
+ SqlCommand = """select DISTINCT GuidValue, ItemType from Report where ModuleID = -2 and ItemMode = 'Produced'"""
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[1] == 'Ppi':
+ EotGlobalData.gPpiList[Record[0].lower()] = -2
+ if Record[1] == 'Protocol':
+ EotGlobalData.gProtocolList[Record[0].lower()] = -2
+
+ ## GenerateQueryTable() method
+ #
+ # Generate two tables to improve query performance
+ #
+ # @param self: The object pointer
+ #
+ def GenerateQueryTable(self):
+ EdkLogger.quiet("Generating temp query table for analysis ... ")
+ for Identifier in EotGlobalData.gIdentifierTableList:
+ SqlCommand = """insert into Query (Name, Modifier, Value, Model)
+ select Name, Modifier, Value, Model from %s where (Model = %s or Model = %s)""" \
+ % (Identifier[0], MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
+ EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ SqlCommand = """insert into Query2 (Name, Modifier, Value, Model)
+ select Name, Modifier, Value, Model from %s where Model = %s""" \
+ % (Identifier[0], MODEL_IDENTIFIER_MACRO_DEFINE)
+ EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+
+ ## ParseExecutionOrder() method
+ #
+ # Get final execution order
+ # 1. Search all PPI
+ # 2. Search all PROTOCOL
+ #
+ # @param self: The object pointer
+ #
+ def ParseExecutionOrder(self):
+ EdkLogger.quiet("Searching Ppi/Protocol ... ")
+ for Identifier in EotGlobalData.gIdentifierTableList:
+ ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled = \
+ -1, '', '', -1, '', '', '', '', '', '', '', '', 0
+
+ SourceFileID = Identifier[0].replace('Identifier', '')
+ SourceFileFullPath = Identifier[1]
+ Identifier = Identifier[0]
+
+ # Find Ppis
+ ItemMode = 'Produced'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.InstallPpi', '->InstallPpi', 'PeiInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)
+
+ ItemMode = 'Produced'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.ReInstallPpi', '->ReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2)
+
+ SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode)
+
+ ItemMode = 'Consumed'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.LocatePpi', '->LocatePpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)
+
+ SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Ppi', ItemMode)
+
+ ItemMode = 'Callback'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.NotifyPpi', '->NotifyPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)
+
+ # Find Protocols
+ ItemMode = 'Produced'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.InstallProtocolInterface', '.ReInstallProtocolInterface', '->InstallProtocolInterface', '->ReInstallProtocolInterface', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1)
+
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.InstallMultipleProtocolInterfaces', '->InstallMultipleProtocolInterfaces', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2)
+
+ SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode)
+
+ ItemMode = 'Consumed'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.LocateProtocol', '->LocateProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0)
+
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.HandleProtocol', '->HandleProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1)
+
+ SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode)
+
+ ItemMode = 'Callback'
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, '.RegisterProtocolNotify', '->RegisterProtocolNotify', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0)
+
+ SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode)
+
+ # Hard Code
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiSecPlatformInformationPpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiNtLoadAsDllPpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtPeiLoadFileGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtAutoScanPpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtFwhPpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtThunkPpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiPlatformTypePpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiFrequencySelectionCpuPpiGuid', '', '', '', 0)
+ EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiCachePpiGuid', '', '', '', 0)
+
+ EotGlobalData.gDb.Conn.commit()
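+
+ # Pattern sketch: '%%' in the query templates above escapes to a literal '%'
+ # under Python %-formatting, producing SQL LIKE wildcards:
+ #
+ #   assert "Name like '%%%s%%'" % '.LocatePpi' == "Name like '%.LocatePpi%'"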
+
+
+ ## BuildDatabase() method
+ #
+ # Build the database for target
+ #
+ # @param self: The object pointer
+ #
+ def BuildDatabase(self):
+ # Clean report table
+ EotGlobalData.gDb.TblReport.Drop()
+ EotGlobalData.gDb.TblReport.Create()
+
+ # Build database
+ if self.IsInit:
+ self.BuildMetaDataFileDatabase(EotGlobalData.gINF_FILES)
+ EdkLogger.quiet("Building database for source code ...")
+ c.CreateCCodeDB(EotGlobalData.gSOURCE_FILES)
+ EdkLogger.quiet("Building database for source code done!")
+
+ EotGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EotGlobalData.gDb)
+
+ ## BuildMetaDataFileDatabase() method
+ #
+ # Build the database for meta data files
+ #
+ # @param self: The object pointer
+ # @param Inf_Files: A list for all INF files
+ #
+ def BuildMetaDataFileDatabase(self, Inf_Files):
+ EdkLogger.quiet("Building database for meta data files ...")
+ for InfFile in Inf_Files:
+ if not InfFile:
+ continue
+ EdkLogger.quiet("Parsing %s ..." % str(InfFile))
+ EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile])
+
+ EotGlobalData.gDb.Conn.commit()
+ EdkLogger.quiet("Building database for meta data files done!")
+
+ ## ParseOption() method
+ #
+ # Parse command line options
+ #
+ # @param self: The object pointer
+ #
+ def ParseOption(self):
+ (Options, Target) = self.EotOptionParser()
+
+ # Set log level
+ self.SetLogLevel(Options)
+
+ if Options.FvFileList:
+ self.FvFileList = Options.FvFileList
+
+ if Options.MapFileList:
+ self.MapFileList = Options.MapFileList
+
+ if Options.SourceFileList:
+ self.SourceFileList = Options.SourceFileList
+
+ if Options.IncludeDirList:
+ self.IncludeDirList = Options.IncludeDirList
+
+ if Options.DecFileList:
+ self.DecFileList = Options.DecFileList
+
+ if Options.GuidList:
+ self.GuidList = Options.GuidList
+
+ if Options.LogFile:
+ self.LogFile = Options.LogFile
+
+ if Options.keepdatabase:
+ self.IsInit = False
+
+ ## SetLogLevel() method
+ #
+ # Set current log level of the tool based on args
+ #
+ # @param self: The object pointer
+ # @param Option: The option list including log level setting
+ #
+ def SetLogLevel(self, Option):
+ if Option.verbose is not None:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Option.quiet is not None:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Option.debug is not None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ ## EotOptionParser() method
+ #
+ # Using standard Python module optparse to parse command line option of this tool.
+ #
+ # @param self: The object pointer
+ #
+ # @retval Opt An optparse.Values object containing the parsed options
+ # @retval Args The target of the build command
+ #
+ def EotOptionParser(self):
+ Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Eot.exe", usage = "%prog [options]")
+ Parser.add_option("-m", "--makefile filename", action="store", type="string", dest='MakeFile',
+ help="Specify a makefile for the platform.")
+ Parser.add_option("-c", "--dsc filename", action="store", type="string", dest="DscFile",
+ help="Specify a dsc file for the platform.")
+ Parser.add_option("-f", "--fv filename", action="store", type="string", dest="FvFileList",
+ help="Specify fv file list, quoted by \"\".")
+ Parser.add_option("-a", "--map filename", action="store", type="string", dest="MapFileList",
+ help="Specify map file list, quoted by \"\".")
+ Parser.add_option("-s", "--source files", action="store", type="string", dest="SourceFileList",
+ help="Specify source file list by a file")
+ Parser.add_option("-i", "--include dirs", action="store", type="string", dest="IncludeDirList",
+ help="Specify include dir list by a file")
+ Parser.add_option("-e", "--dec files", action="store", type="string", dest="DecFileList",
+ help="Specify dec file list by a file")
+ Parser.add_option("-g", "--guid list", action="store", type="string", dest="GuidList",
+ help="Specify guid file list by a file")
+ Parser.add_option("-l", "--log filename", action="store", type="string", dest="LogFile",
+ help="Specify real execution log file")
+
+ Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Eot database will not be cleaned except report information if this option is specified.")
+
+ Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
+ Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
+ "including library instances selected, final dependency expression, "\
+ "and warning messages, etc.")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+
+ (Opt, Args)=Parser.parse_args()
+
+ return (Opt, Args)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ # Initialize log system
+ EdkLogger.Initialize()
+ EdkLogger.IsRaiseError = False
+ EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
+
+ StartTime = time.perf_counter()  # time.clock() was removed in Python 3.8
+ Eot = Eot(CommandLineOption=False,
+ SourceFileList=r'C:\TestEot\Source.txt',
+ GuidList=r'C:\TestEot\Guid.txt',
+ FvFileList=r'C:\TestEot\FVRECOVERY.Fv')
+ FinishTime = time.perf_counter()
+
+ BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
+ EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotToolError.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotToolError.py
new file mode 100644
index 00000000..aebe8e8e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/EotToolError.py
@@ -0,0 +1,15 @@
+## @file
+# Standardized Error Handling infrastructures.
+#
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+# Error id
+ERROR_1 = 1000
+
+# Error message
+gEccErrorMessage = {
+ ERROR_1 : "RESERVED"
+ }
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/FileProfile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/FileProfile.py
new file mode 100755
index 00000000..d35affd7
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/FileProfile.py
@@ -0,0 +1,54 @@
+## @file
+# Fragments of a source file
+#
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+
+from __future__ import absolute_import
+import re
+import Common.LongFilePathOs as os
+from .ParserWarning import Warning
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+# Profile contents of a file
+PPDirectiveList = []
+AssignmentExpressionList = []
+PredicateExpressionList = []
+FunctionDefinitionList = []
+VariableDeclarationList = []
+EnumerationDefinitionList = []
+StructUnionDefinitionList = []
+TypedefDefinitionList = []
+FunctionCallingList = []
+
+## Class FileProfile
+#
+# Records file data when parsing source files
+#
+# May raise an Exception when opening the file.
+#
+class FileProfile:
+
+ ## The constructor
+ #
+ # @param self: The object pointer
+ # @param FileName: The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.FileLinesList = []
+ self.FileLinesListFromFile = []
+ try:
+ fsock = open(FileName, "rb", 0)
+ try:
+ self.FileLinesListFromFile = fsock.readlines()
+ finally:
+ fsock.close()
+
+ except IOError:
+ raise Warning("Error when opening file %s" % FileName)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Identification.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Identification.py
new file mode 100755
index 00000000..6a59306f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Identification.py
@@ -0,0 +1,52 @@
+## @file
+# This file is used to define the identification of INF/DEC/DSC files
+#
+# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+## Identification
+#
+# This class defines the basic Identification information structure used by INF/DEC/DSC files
+#
+# @param object: Inherited from object class
+#
+# @var FileName: To store data for Filename
+# @var FileFullPath: To store data for full path of the file
+# @var FileRelativePath: To store data for relative path of the file
+# @var RunStatus: Status of build system running
+#
+class Identification(object):
+ def __init__(self):
+ self.FileName = ''
+ self.FileFullPath = ''
+ self.FileRelativePath = ''
+ self.PackagePath = ''
+
+ ## GetFileName
+ #
+ # Reserved
+ #
+ def GetFileName(self, FileFullPath, FileRelativePath):
+ pass
+
+ ## GetFileFullPath
+ #
+ # Reserved
+ #
+ def GetFileFullPath(self, FileName, FileRelativePath):
+ pass
+
+ ## GetFileRelativePath
+ #
+ # Reserved
+ #
+ def GetFileRelativePath(self, FileName, FileFullPath):
+ pass
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ id = Identification()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/InfParserLite.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/InfParserLite.py
new file mode 100755
index 00000000..9a571d54
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/InfParserLite.py
@@ -0,0 +1,148 @@
+## @file
+# This file is used to parse INF file of EDK project
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+
+import Common.LongFilePathOs as os
+import Common.EdkLogger as EdkLogger
+from Common.DataType import *
+from CommonDataClass.DataClass import *
+from Eot.Identification import Identification
+from Common.StringUtils import *
+from Eot.Parser import *
+from Eot import Database
+from Eot import EotGlobalData
+
+## EdkInfParser() class
+#
+# This class defines a basic INF object intended to be used as a base class
+#
+# @param object: Inherited from object class
+#
+class EdkInfParser(object):
+ ## The constructor
+ #
+ # @param self: The object pointer
+ # @param Filename: INF file name
+ # @param Database: Eot database
+ # @param SourceFileList: A list of all source files belonging to this INF file
+ #
+ def __init__(self, Filename = None, Database = None, SourceFileList = None):
+ self.Identification = Identification()
+ self.Sources = []
+ self.Macros = {}
+
+ self.Cur = Database.Cur
+ self.TblFile = Database.TblFile
+ self.TblInf = Database.TblInf
+ self.FileID = -1
+
+ # Load Inf file if filename is not None
+ if Filename is not None:
+ self.LoadInfFile(Filename)
+
+ if SourceFileList:
+ for Item in SourceFileList:
+ self.TblInf.Insert(MODEL_EFI_SOURCE_FILE, Item, '', '', '', '', 'COMMON', -1, self.FileID, -1, -1, -1, -1, 0)
+
+
+ ## LoadInfFile() method
+ #
+ # Load INF file and insert a record in database
+ #
+ # @param self: The object pointer
+ # @param Filename: Input value for filename of Inf file
+ #
+ def LoadInfFile(self, Filename = None):
+ # Insert a record for file
+ Filename = NormPath(Filename)
+ self.Identification.FileFullPath = Filename
+ (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
+
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
+
+ self.ParseInf(PreProcess(Filename, False), self.Identification.FileRelativePath, Filename)
+
+ ## ParserSource() method
+ #
+ # Parse Source section and insert records in database
+ #
+ # @param self: The object pointer
+ # @param CurrentSection: current section name
+ # @param SectionItemList: the items belonging to the current section
+ # @param ArchList: A list for arch for this section
+ # @param ThirdList: A list for third item for this section
+ #
+ def ParserSource(self, CurrentSection, SectionItemList, ArchList, ThirdList):
+ for Index in range(0, len(ArchList)):
+ Arch = ArchList[Index]
+ Third = ThirdList[Index]
+ if Arch == '':
+ Arch = TAB_ARCH_COMMON
+
+ for Item in SectionItemList:
+ if CurrentSection.upper() == 'defines'.upper():
+ (Name, Value) = AddToSelfMacro(self.Macros, Item[0])
+ self.TblInf.Insert(MODEL_META_DATA_HEADER, Name, Value, Third, '', '', Arch, -1, self.FileID, Item[1], -1, Item[1], -1, 0)
+
+ ## ParseInf() method
+ #
+ # Parse INF file and get sections information
+ #
+ # @param self: The object pointer
+ # @param Lines: contents of INF file
+ # @param FileRelativePath: relative path of the file
+ # @param Filename: file name of INF file
+ #
+ def ParseInf(self, Lines = [], FileRelativePath = '', Filename = ''):
+ IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+ [], [], TAB_UNKNOWN, [], [], []
+ LineNo = 0
+
+ for Line in Lines:
+ LineNo = LineNo + 1
+ if Line == '':
+ continue
+ if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+ self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
+
+ # Parse the new section
+ SectionItemList = []
+ ArchList = []
+ ThirdList = []
+ # Parse section name
+ CurrentSection = ''
+ LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+ for Item in LineList:
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ if CurrentSection == '':
+ CurrentSection = ItemList[0]
+ else:
+ if CurrentSection != ItemList[0]:
+ EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo)
+ ItemList.append('')
+ ItemList.append('')
+ if len(ItemList) > 5:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ else:
+ ArchList.append(ItemList[1].upper())
+ ThirdList.append(ItemList[2])
+
+ continue
+
+ # Add a section item
+ SectionItemList.append([Line, LineNo])
+ # End of parse
+
+ self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
+ # End of for loop
+
+
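+# Illustrative usage (not part of the original tool). EdkInfParser needs an
+# Eot database object exposing Cur, TblFile and TblInf; file names below are
+# examples only:
+#
+#   Db = Database.Database('Eot.db')
+#   Parser = EdkInfParser('Sample.inf', Db, ['Sample.c'])
+#   print(Parser.Macros)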
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Parser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Parser.py
new file mode 100755
index 00000000..2b780c1f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Parser.py
@@ -0,0 +1,869 @@
+## @file
+# This file is used to define common parsing-related functions used in the
+# INF/DSC/Makefile parsing process
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os, re
+import Common.EdkLogger as EdkLogger
+from Common.DataType import *
+from CommonDataClass.DataClass import *
+from Common.StringUtils import CleanString, GetSplitValueList, ReplaceMacro
+from . import EotGlobalData
+from Common.StringUtils import GetSplitList
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+import subprocess
+
+## DeCompress
+#
+# Call external decompress tool to decompress the fv section
+#
+def DeCompress(Method, Input):
+ # Write the input to a temp file
+ open('_Temp.bin', 'wb').write(Input)
+ cmd = ''
+ if Method == 'Lzma':
+ cmd = r'LzmaCompress -o _New.bin -d _Temp.bin'
+ if Method == 'Efi':
+ cmd = r'TianoCompress -d --uefi -o _New.bin _Temp.bin'
+ if Method == 'Framework':
+ cmd = r'TianoCompress -d -o _New.bin _Temp.bin'
+
+ # Call tool to create the decompressed output file
+ Process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ Process.communicate()[0]
+
+ # Return the buffer of _New.bin
+ if os.path.exists('_New.bin'):
+ return open('_New.bin', 'rb').read()
+
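+# Note: DeCompress shells out to the external LzmaCompress/TianoCompress
+# tools, which must be on PATH; it returns None when decompression fails.
+# Illustrative call (not part of the original tool), assuming 'Data' holds a
+# compressed section buffer:
+#
+#   Raw = DeCompress('Lzma', Data)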
+
+## PreProcess() method
+#
+# Pre process a file
+#
+# 1. Remove all comments
+# 2. Merge multi-line code into one line
+#
+# @param Filename: Name of the file to be parsed
+# @param MergeMultipleLines: Switch controlling whether continued lines are merged
+# @param LineNo: Default line no
+#
+# @return Lines: The file contents after removing comments
+#
+def PreProcess(Filename, MergeMultipleLines = True, LineNo = -1):
+ Lines = []
+ Filename = os.path.normpath(Filename)
+ if not os.path.isfile(Filename):
+ EdkLogger.error("Eot", EdkLogger.FILE_NOT_FOUND, ExtraData=Filename)
+
+ IsFindBlockComment = False
+ IsFindBlockCode = False
+ ReservedLine = ''
+ ReservedLineLength = 0
+ for Line in open(Filename, 'r'):
+ Line = Line.strip()
+ # Remove comment block
+ if Line.find(TAB_COMMENT_EDK_START) > -1:
+ ReservedLine = GetSplitList(Line, TAB_COMMENT_EDK_START, 1)[0]
+ IsFindBlockComment = True
+ if Line.find(TAB_COMMENT_EDK_END) > -1:
+ Line = ReservedLine + GetSplitList(Line, TAB_COMMENT_EDK_END, 1)[1]
+ ReservedLine = ''
+ IsFindBlockComment = False
+ if IsFindBlockComment:
+ Lines.append('')
+ continue
+
+ # Remove comments at tail and remove spaces again
+ Line = CleanString(Line)
+ if Line == '':
+ Lines.append('')
+ continue
+
+ if MergeMultipleLines:
+ # Add multiple lines to one line
+ if IsFindBlockCode and Line[-1] != TAB_SLASH:
+ ReservedLine = (ReservedLine + TAB_SPACE_SPLIT + Line).strip()
+ Lines.append(ReservedLine)
+ for Index in range(0, ReservedLineLength):
+ Lines.append('')
+ ReservedLine = ''
+ ReservedLineLength = 0
+ IsFindBlockCode = False
+ continue
+ if Line[-1] == TAB_SLASH:
+ ReservedLine = ReservedLine + TAB_SPACE_SPLIT + Line[0:-1].strip()
+ ReservedLineLength = ReservedLineLength + 1
+ IsFindBlockCode = True
+ continue
+
+ Lines.append(Line)
+
+ return Lines
+
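+# Illustrative behavior (not part of the original tool): for an input file
+# containing 'A = 1 \' followed by '+ 2', PreProcess returns the merged line
+# 'A = 1 + 2' plus one padding empty string per merged line, so indices into
+# the returned list still map back to original line numbers.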
+## AddToGlobalMacro() method
+#
+# Add a macro to EotGlobalData.gMACRO
+#
+# @param Name: Name of the macro
+# @param Value: Value of the macro
+#
+def AddToGlobalMacro(Name, Value):
+ Value = ReplaceMacro(Value, EotGlobalData.gMACRO, True)
+ EotGlobalData.gMACRO[Name] = Value
+
+## AddToSelfMacro() method
+#
+# Parse a line of macro definition and add it to a macro set
+#
+# @param SelfMacro: The self macro set
+# @param Line: The line of a macro definition
+#
+# @return Name: Name of macro
+# @return Value: Value of macro
+#
+def AddToSelfMacro(SelfMacro, Line):
+ Name, Value = '', ''
+ List = GetSplitValueList(Line, TAB_EQUAL_SPLIT, 1)
+ if len(List) == 2:
+ Name = List[0]
+ Value = List[1]
+ Value = ReplaceMacro(Value, EotGlobalData.gMACRO, True)
+ Value = ReplaceMacro(Value, SelfMacro, True)
+ SelfMacro[Name] = Value
+
+ return (Name, Value)
+
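+# Illustrative call (not part of the original tool):
+#
+#   Macros = {}
+#   AddToSelfMacro(Macros, 'EDK_SOURCE = C:\\Edk')   # -> ('EDK_SOURCE', 'C:\\Edk')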
+## GetIncludeListOfFile() method
+#
+# Get the include path list for a source file
+#
+# 1. Find which INF file the source file belongs to
+# 2. Find the INF's package
+# 3. Return the include path list of the package
+#
+# @param WorkSpace: WORKSPACE path
+# @param Filepath: File path
+# @param Db: Eot database
+#
+# @return IncludeList: A list of include directories
+#
+def GetIncludeListOfFile(WorkSpace, Filepath, Db):
+ IncludeList = []
+ Filepath = os.path.normpath(Filepath)
+ SqlCommand = """
+ select Value1 from Inf where Model = %s and BelongsToFile in(
+ select distinct B.BelongsToFile from File as A left join Inf as B
+ where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')""" \
+ % (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
+ RecordSet = Db.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ DecFullPath = os.path.normpath(os.path.join(WorkSpace, Record[0]))
+ (DecPath, DecName) = os.path.split(DecFullPath)
+ SqlCommand = """select Value1 from Dec where BelongsToFile =
+ (select ID from File where FullPath = '%s') and Model = %s""" \
+ % (DecFullPath, MODEL_EFI_INCLUDE)
+ NewRecordSet = Db.TblDec.Exec(SqlCommand)
+ for NewRecord in NewRecordSet:
+ IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
+ if IncludePath not in IncludeList:
+ IncludeList.append(IncludePath)
+
+ return IncludeList
+
+## GetTableList() method
+#
+# Search table file and find all small tables
+#
+# @param FileModelList: Model code for the file list
+# @param Table: Table to insert records
+# @param Db: Eot database
+#
+# @return TableList: A list of tables
+#
+def GetTableList(FileModelList, Table, Db):
+ TableList = []
+ SqlCommand = """select ID, FullPath from File where Model in %s""" % str(FileModelList)
+ RecordSet = Db.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ TableName = Table + str(Record[0])
+ TableList.append([TableName, Record[1]])
+
+ return TableList
+
+## GetAllIncludeDirs() method
+#
+# Find all Include directories
+#
+# @param Db: Eot database
+#
+# @return IncludeList: A list of include directories
+#
+def GetAllIncludeDirs(Db):
+ IncludeList = []
+ SqlCommand = """select distinct Value1 from Inf where Model = %s order by Value1""" % MODEL_EFI_INCLUDE
+ RecordSet = Db.TblInf.Exec(SqlCommand)
+
+ for Record in RecordSet:
+ IncludeList.append(Record[0])
+
+ return IncludeList
+
+## GetAllIncludeFiles() method
+#
+# Find all Include files
+#
+# @param Db: Eot database
+#
+# @return IncludeFileList: A list of include files
+#
+def GetAllIncludeFiles(Db):
+ IncludeList = GetAllIncludeDirs(Db)
+ IncludeFileList = []
+
+ for Dir in IncludeList:
+ if os.path.isdir(Dir):
+ SubDir = os.listdir(Dir)
+ for Item in SubDir:
+ if os.path.isfile(os.path.join(Dir, Item)):
+ IncludeFileList.append(os.path.join(Dir, Item))
+
+ return IncludeFileList
+
+## GetAllSourceFiles() method
+#
+# Find all source files
+#
+# @param Db: Eot database
+#
+# @return SourceFileList: A list of source files
+#
+def GetAllSourceFiles(Db):
+ SourceFileList = []
+ SqlCommand = """select distinct Value1 from Inf where Model = %s order by Value1""" % MODEL_EFI_SOURCE_FILE
+ RecordSet = Db.TblInf.Exec(SqlCommand)
+
+ for Record in RecordSet:
+ SourceFileList.append(Record[0])
+
+ return SourceFileList
+
+## GetAllFiles() method
+#
+# Find all files, both source files and include files
+#
+# @param Db: Eot database
+#
+# @return FileList: A list of files
+#
+def GetAllFiles(Db):
+ FileList = []
+ IncludeFileList = GetAllIncludeFiles(Db)
+ SourceFileList = GetAllSourceFiles(Db)
+ for Item in IncludeFileList:
+ if os.path.isfile(Item) and Item not in FileList:
+ FileList.append(Item)
+ for Item in SourceFileList:
+ if os.path.isfile(Item) and Item not in FileList:
+ FileList.append(Item)
+
+ return FileList
+
+## ParseConditionalStatement() method
+#
+# Parse conditional statement
+#
+# @param Line: One line to be parsed
+# @param Macros: A set of all macros
+# @param StatusSet: A set of all status
+#
+# @retval True: A conditional statement keyword was found
+# @retval False: No conditional statement keyword was found
+#
+def ParseConditionalStatement(Line, Macros, StatusSet):
+ NewLine = Line.upper()
+ if NewLine.find(TAB_IF_EXIST.upper()) > -1:
+ IfLine = Line[NewLine.find(TAB_IF_EXIST) + len(TAB_IF_EXIST) + 1:].strip()
+ IfLine = ReplaceMacro(IfLine, EotGlobalData.gMACRO, True)
+ IfLine = ReplaceMacro(IfLine, Macros, True)
+ IfLine = IfLine.replace("\"", '')
+ IfLine = IfLine.replace("(", '')
+ IfLine = IfLine.replace(")", '')
+ Status = os.path.exists(os.path.normpath(IfLine))
+ StatusSet.append([Status])
+ return True
+ if NewLine.find(TAB_IF_DEF.upper()) > -1:
+ IfLine = Line[NewLine.find(TAB_IF_DEF) + len(TAB_IF_DEF) + 1:].strip()
+ Status = False
+ if IfLine in Macros or IfLine in EotGlobalData.gMACRO:
+ Status = True
+ StatusSet.append([Status])
+ return True
+ if NewLine.find(TAB_IF_N_DEF.upper()) > -1:
+ IfLine = Line[NewLine.find(TAB_IF_N_DEF) + len(TAB_IF_N_DEF) + 1:].strip()
+ Status = False
+ if IfLine not in Macros and IfLine not in EotGlobalData.gMACRO:
+ Status = True
+ StatusSet.append([Status])
+ return True
+ if NewLine.find(TAB_IF.upper()) > -1:
+ IfLine = Line[NewLine.find(TAB_IF) + len(TAB_IF) + 1:].strip()
+ Status = ParseConditionalStatementMacros(IfLine, Macros)
+ StatusSet.append([Status])
+ return True
+ if NewLine.find(TAB_ELSE_IF.upper()) > -1:
+ IfLine = Line[NewLine.find(TAB_ELSE_IF) + len(TAB_ELSE_IF) + 1:].strip()
+ Status = ParseConditionalStatementMacros(IfLine, Macros)
+ StatusSet[-1].append(Status)
+ return True
+ if NewLine.find(TAB_ELSE.upper()) > -1:
+ Status = False
+ for Item in StatusSet[-1]:
+ Status = Status or Item
+ StatusSet[-1].append(not Status)
+ return True
+ if NewLine.find(TAB_END_IF.upper()) > -1:
+ StatusSet.pop()
+ return True
+
+ return False
+
+## ParseConditionalStatementMacros() method
+#
+# Evaluate a conditional statement after replacing macros
+#
+# @param Line: One line to be parsed
+# @param Macros: A set of macros
+#
+# @return: The evaluated result of the expression, True or False
+#
+def ParseConditionalStatementMacros(Line, Macros):
+ if Line.upper().find('DEFINED(') > -1 or Line.upper().find('EXIST') > -1:
+ return False
+ Line = ReplaceMacro(Line, EotGlobalData.gMACRO, True)
+ Line = ReplaceMacro(Line, Macros, True)
+ Line = Line.replace("&&", "and")
+ Line = Line.replace("||", "or")
+ return eval(Line)
+
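+# Illustrative evaluation (not part of the original tool): with
+# Macros = {'ARCH': 'IA32'}, the line '"$(ARCH)" == "IA32"' becomes
+# '"IA32" == "IA32"' after macro replacement and eval() returns True.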
+## GetConditionalStatementStatus() method
+#
+# 1. Start with an overall status of True
+# 2. Take the latest status of each nesting level in the status set
+# 3. AND them together to get the final status
+#
+# @param StatusSet: A set of all status
+#
+# @return Status: The final status
+#
+def GetConditionalStatementStatus(StatusSet):
+ Status = True
+ for Item in StatusSet:
+ Status = Status and Item[-1]
+
+ return Status
+
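+# Illustrative call (not part of the original tool): with nested levels
+# StatusSet = [[True], [False, True]] the latest status of each level is
+# True, so GetConditionalStatementStatus(StatusSet) returns True.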
+## SearchBelongsToFunction() method
+#
+# Search the function in the given file that contains the given line scope
+#
+# @param BelongsToFile: File id
+# @param StartLine: Start line of search scope
+# @param EndLine: End line of search scope
+#
+# @return: The found function
+#
+def SearchBelongsToFunction(BelongsToFile, StartLine, EndLine):
+ SqlCommand = """select ID, Name from Function where BelongsToFile = %s and StartLine <= %s and EndLine >= %s""" %(BelongsToFile, StartLine, EndLine)
+ RecordSet = EotGlobalData.gDb.TblFunction.Exec(SqlCommand)
+ if RecordSet != []:
+ return RecordSet[0][0], RecordSet[0][1]
+ else:
+ return -1, ''
+
+## SearchPpiCallFunction() method
+#
+# Search all uses of the PPI calling functions 'PeiServicesReInstallPpi' and 'PeiServicesInstallPpi'
+# Store the results in the database
+#
+# @param Identifier: Table id
+# @param SourceFileID: Source file id
+# @param SourceFileFullPath: Source file full path
+# @param ItemMode: Mode of the item
+#
+def SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode):
+ ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Ppi', '', '', ''
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' and Model = %s)""" \
+ % (Identifier, 'PeiServicesReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ BelongsToFunctionID, BelongsToFunction = -1, ''
+ Db = EotGlobalData.gDb.TblReport
+ RecordSet = Db.Exec(SqlCommand)
+ for Record in RecordSet:
+ Index = 0
+ BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
+ BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
+ VariableList = Record[0].split(',')
+ for Variable in VariableList:
+ Variable = Variable.strip()
+ # Get index of the variable
+ if Variable.find('[') > -1:
+ Index = int(Variable[Variable.find('[') + 1 : Variable.find(']')])
+ Variable = Variable[:Variable.find('[')]
+ # Get variable name
+ if Variable.startswith('&'):
+ Variable = Variable[1:]
+ # Get variable value
+ SqlCommand = """select Value from %s where (Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, Variable, MODEL_IDENTIFIER_VARIABLE)
+ NewRecordSet = Db.Exec(SqlCommand)
+ if NewRecordSet:
+ NewRecord = NewRecordSet[0][0]
+ VariableValueList = NewRecord.split('},')
+ if len(VariableValueList) > Index:
+ VariableValue = VariableValueList[Index]
+ NewVariableValueList = VariableValue.split(',')
+ if len(NewVariableValueList) > 1:
+ NewVariableValue = NewVariableValueList[1].strip()
+ if NewVariableValue.startswith('&'):
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, NewVariableValue[1:], GuidMacro, GuidValue, BelongsToFunction, 0)
+ continue
+ else:
+ EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Variable))
+
+ ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Ppi', '', '', ''
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Value like '%%%s%%' and Model = %s)""" \
+ % (Identifier, 'PeiServicesInstallPpi', MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
+ BelongsToFunctionID, BelongsToFunction = -1, ''
+ Db = EotGlobalData.gDb.TblReport
+ RecordSet = Db.Exec(SqlCommand)
+
+ SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
+ where (Name like '%%%s%%' and Model = %s)""" \
+ % (Identifier, 'PeiServicesInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
+ Db = EotGlobalData.gDb.TblReport
+ RecordSet2 = Db.Exec(SqlCommand)
+
+ for Record in RecordSet + RecordSet2:
+ if Record == []:
+ continue
+ Index = 0
+ BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
+ BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
+ Variable = Record[0].replace('PeiServicesInstallPpi', '').replace('(', '').replace(')', '').replace('&', '').strip()
+ Variable = Variable[Variable.find(',') + 1:].strip()
+ # Get index of the variable
+ if Variable.find('[') > -1:
+ Index = int(Variable[Variable.find('[') + 1 : Variable.find(']')])
+ Variable = Variable[:Variable.find('[')]
+ # Get variable name
+ if Variable.startswith('&'):
+ Variable = Variable[1:]
+ # Get variable value
+ SqlCommand = """select Value from %s where (Name like '%%%s%%') and Model = %s""" \
+ % (Identifier, Variable, MODEL_IDENTIFIER_VARIABLE)
+ NewRecordSet = Db.Exec(SqlCommand)
+ if NewRecordSet:
+ NewRecord = NewRecordSet[0][0]
+ VariableValueList = NewRecord.split('},')
+ for VariableValue in VariableValueList[Index:]:
+ NewVariableValueList = VariableValue.split(',')
+ if len(NewVariableValueList) > 1:
+ NewVariableValue = NewVariableValueList[1].strip()
+ if NewVariableValue.startswith('&'):
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, NewVariableValue[1:], GuidMacro, GuidValue, BelongsToFunction, 0)
+ continue
+ else:
+ EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Variable))
+
+## SearchPpi() method
+#
+# Search all used PPI calling functions
+# Store the results in the database
+#
+# @param SqlCommand: SQL command statement
+# @param Table: Table id
+# @param SourceFileID: Source file id
+# @param SourceFileFullPath: Source file full path
+# @param ItemMode: Mode of the item
+# @param PpiMode: Mode of PPI
+#
+def SearchPpi(SqlCommand, Table, SourceFileID, SourceFileFullPath, ItemMode, PpiMode = 1):
+ ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Ppi', '', '', ''
+ BelongsToFunctionID, BelongsToFunction = -1, ''
+ Db = EotGlobalData.gDb.TblReport
+ RecordSet = Db.Exec(SqlCommand)
+ for Record in RecordSet:
+ Parameter = GetPpiParameter(Record[0], PpiMode)
+ BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
+ # Get BelongsToFunction
+ BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
+
+ # Default is Not Found
+ IsFound = False
+
+ # For Consumed Ppi
+ if ItemMode == 'Consumed':
+ if Parameter.startswith('g'):
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, Parameter, GuidMacro, GuidValue, BelongsToFunction, 0)
+ else:
+ EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
+ continue
+
+ # Direct Parameter.Guid
+ SqlCommand = """select Value from %s where (Name like '%%%s.Guid%%' or Name like '%%%s->Guid%%') and Model = %s""" % (Table, Parameter, Parameter, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
+ NewRecordSet = Db.Exec(SqlCommand)
+ for NewRecord in NewRecordSet:
+ GuidName = GetParameterName(NewRecord[0])
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+
+ # Defined Parameter
+ if not IsFound:
+ Key = Parameter
+ if Key.rfind(' ') > -1:
+ Key = Key[Key.rfind(' ') : ].strip().replace('&', '')
+ Value = FindKeyValue(EotGlobalData.gDb.TblFile, Table, Key)
+ List = GetSplitValueList(Value.replace('\n', ''), TAB_COMMA_SPLIT)
+ if len(List) > 1:
+ GuidName = GetParameterName(List[1])
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+
+ # A list parameter
+ if not IsFound:
+ Start = Parameter.find('[')
+ End = Parameter.find(']')
+ if Start > -1 and End > -1 and Start < End:
+ try:
+ Index = int(Parameter[Start + 1 : End])
+ Parameter = Parameter[0 : Start]
+ SqlCommand = """select Value from %s where Name = '%s' and Model = %s""" % (Table, Parameter, MODEL_IDENTIFIER_VARIABLE)
+ NewRecordSet = Db.Exec(SqlCommand)
+ for NewRecord in NewRecordSet:
+ NewParameter = GetSplitValueList(NewRecord[0], '}')[Index]
+ GuidName = GetPpiParameter(NewParameter[NewParameter.find('{') : ])
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+ except Exception:
+ pass
+
+ # An external parameter
+ if not IsFound:
+ SqlCommand = """select File.ID from Inf, File
+ where BelongsToFile = (select BelongsToFile from Inf where Value1 = '%s')
+ and Inf.Model = %s and Inf.Value1 = File.FullPath and File.Model = %s""" % (SourceFileFullPath, MODEL_EFI_SOURCE_FILE, MODEL_FILE_C)
+ NewRecordSet = Db.Exec(SqlCommand)
+ for NewRecord in NewRecordSet:
+ Table = 'Identifier' + str(NewRecord[0])
+ SqlCommand = """select Value from %s where Name = '%s' and Modifier = 'EFI_PEI_PPI_DESCRIPTOR' and Model = %s""" % (Table, Parameter, MODEL_IDENTIFIER_VARIABLE)
+ PpiSet = Db.Exec(SqlCommand)
+ if PpiSet != []:
+ GuidName = GetPpiParameter(PpiSet[0][0])
+ if GuidName != '':
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+ break
+
+ if not IsFound:
+ EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
+
+## SearchProtocols() method
+#
+# Search all used PROTOCOL calling functions
+# Store the results in the database
+#
+# @param SqlCommand: SQL command statement
+# @param Table: Table id
+# @param SourceFileID: Source file id
+# @param SourceFileFullPath: Source file full path
+# @param ItemMode: Mode of the item
+# @param ProtocolMode: Mode of PROTOCOL
+#
+def SearchProtocols(SqlCommand, Table, SourceFileID, SourceFileFullPath, ItemMode, ProtocolMode):
+ ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Protocol', '', '', ''
+ BelongsToFunctionID, BelongsToFunction = -1, ''
+ Db = EotGlobalData.gDb.TblReport
+ RecordSet = Db.Exec(SqlCommand)
+ for Record in RecordSet:
+ Parameter = ''
+ BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
+ # Get BelongsToFunction
+ BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
+
+ # Default is Not Found
+ IsFound = False
+
+ if ProtocolMode == 0 or ProtocolMode == 1:
+ Parameter = GetProtocolParameter(Record[0], ProtocolMode)
+ if Parameter.startswith('g') or Parameter.endswith('Guid') or Parameter == 'ShellEnvProtocol' or Parameter == 'ShellInterfaceProtocol':
+ GuidName = GetParameterName(Parameter)
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+
+ if ProtocolMode == 2:
+ Protocols = GetSplitValueList(Record[0], TAB_COMMA_SPLIT)
+ for Protocol in Protocols:
+ if Protocol.startswith('&') and Protocol.endswith('Guid'):
+ GuidName = GetParameterName(Protocol)
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+ else:
+ NewValue = FindKeyValue(EotGlobalData.gDb.TblFile, Table, Protocol)
+ if Protocol != NewValue and NewValue.endswith('Guid'):
+ GuidName = GetParameterName(NewValue)
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+
+ if not IsFound:
+ if BelongsToFunction in EotGlobalData.gProducedProtocolLibrary or BelongsToFunction in EotGlobalData.gConsumedProtocolLibrary:
+ EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s, %s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter, BelongsToFunction))
+ else:
+ EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
+
+## SearchFunctionCalling() method
+#
+# Search all used PPI/PROTOCOL calling functions provided by libraries
+# Store the results in the database
+#
+# @param Table: Table id
+# @param SourceFileID: Source file id
+# @param SourceFileFullPath: Source file full path
+# @param ItemType: Type of the item, PPI or PROTOCOL
+# @param ItemMode: Mode of item
+#
+def SearchFunctionCalling(Table, SourceFileID, SourceFileFullPath, ItemType, ItemMode):
+ LibraryList = {}
+ Db = EotGlobalData.gDb.TblReport
+ Parameters, ItemName, GuidName, GuidMacro, GuidValue, BelongsToFunction = [], '', '', '', '', ''
+ if ItemType == 'Protocol' and ItemMode == 'Produced':
+ LibraryList = EotGlobalData.gProducedProtocolLibrary
+ elif ItemType == 'Protocol' and ItemMode == 'Consumed':
+ LibraryList = EotGlobalData.gConsumedProtocolLibrary
+ elif ItemType == 'Protocol' and ItemMode == 'Callback':
+ LibraryList = EotGlobalData.gCallbackProtocolLibrary
+ elif ItemType == 'Ppi' and ItemMode == 'Produced':
+ LibraryList = EotGlobalData.gProducedPpiLibrary
+ elif ItemType == 'Ppi' and ItemMode == 'Consumed':
+ LibraryList = EotGlobalData.gConsumedPpiLibrary
+
+ for Library in LibraryList:
+ Index = LibraryList[Library]
+ SqlCommand = """select Value, StartLine from %s
+ where Name like '%%%s%%' and Model = %s""" \
+ % (Table, Library, MODEL_IDENTIFIER_FUNCTION_CALLING)
+ RecordSet = Db.Exec(SqlCommand)
+ for Record in RecordSet:
+ IsFound = False
+ if Index == -1:
+ ParameterList = GetSplitValueList(Record[0], TAB_COMMA_SPLIT)
+ for Parameter in ParameterList:
+ Parameters.append(GetParameterName(Parameter))
+ else:
+ Parameters = [GetProtocolParameter(Record[0], Index)]
+ StartLine = Record[1]
+ for Parameter in Parameters:
+ if Parameter.startswith('g') or Parameter.endswith('Guid') or Parameter == 'ShellEnvProtocol' or Parameter == 'ShellInterfaceProtocol':
+ GuidName = GetParameterName(Parameter)
+ Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
+ IsFound = True
+
+ if not IsFound:
+ EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
+
+## FindProtocols() method
+#
+# Find defined protocols
+#
+# @param SqlCommand: SQL command statement
+# @param Table: Table id
+# @param SourceFileID: Source file id
+# @param SourceFileFullPath: Source file full path
+# @param ItemName: String of protocol definition
+# @param ItemType: Type of the item, PPI or PROTOCOL
+# @param ItemMode: Mode of item
+#
+#def FindProtocols(Db, SqlCommand, Table, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue):
+# BelongsToFunction = ''
+# RecordSet = Db.Exec(SqlCommand)
+# for Record in RecordSet:
+# IsFound = True
+# Parameter = GetProtocolParameter(Record[0])
+
+## GetProtocolParameter() method
+#
+# Parse string of protocol and find parameters
+#
+# @param Parameter: Parameter to be parsed
+# @param Index: The index of the parameter
+#
+# @return: call common GetParameter
+#
+def GetProtocolParameter(Parameter, Index = 1):
+ return GetParameter(Parameter, Index)
+
+## GetPpiParameter() method
+#
+# Parse string of PPI and find parameters
+#
+# @param Parameter: Parameter to be parsed
+# @param Index: The index of the parameter
+#
+# @return: call common GetParameter
+#
+def GetPpiParameter(Parameter, Index = 1):
+ return GetParameter(Parameter, Index)
+
+## GetParameter() method
+#
+# Get a parameter by index
+#
+# @param Parameter: Parameter to be parsed
+# @param Index: The index of the parameter
+#
+# @return Parameter: The found parameter
+#
+def GetParameter(Parameter, Index = 1):
+ ParameterList = GetSplitValueList(Parameter, TAB_COMMA_SPLIT)
+ if len(ParameterList) > Index:
+ Parameter = GetParameterName(ParameterList[Index])
+
+ return Parameter
+
+ return ''
+
+## GetParameterName() method
+#
+# Get a parameter name
+#
+# @param Parameter: Parameter to be parsed
+#
+# @return: The name of parameter
+#
+def GetParameterName(Parameter):
+ if isinstance(Parameter, type('')) and Parameter.startswith('&'):
+ return Parameter[1:].replace('{', '').replace('}', '').replace('\r', '').replace('\n', '').strip()
+ else:
+ return Parameter.strip()
+
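+# Illustrative calls (not part of the original tool; gSamplePpiGuid is a
+# made-up name):
+#
+#   GetParameterName('&gEfiPeiMemoryDiscoveredPpiGuid')
+#       # -> 'gEfiPeiMemoryDiscoveredPpiGuid'
+#   GetParameter('&PpiDescriptor, &gSamplePpiGuid', 1)
+#       # -> 'gSamplePpiGuid'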
+## FindKeyValue() method
+#
+# Find key value of a variable
+#
+# @param Db: Database to be searched
+# @param Table: Table to be searched
+# @param Key: The keyword
+#
+# @return Value: The value of the keyword
+#
+def FindKeyValue(Db, Table, Key):
+ SqlCommand = """select Value from %s where Name = '%s' and (Model = %s or Model = %s)""" % (Table, Key, MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
+ RecordSet = Db.Exec(SqlCommand)
+ Value = ''
+ for Record in RecordSet:
+ if Record[0] != 'NULL':
+ Value = FindKeyValue(Db, Table, GetParameterName(Record[0]))
+
+ if Value != '':
+ return Value
+ else:
+ return Key
+
+## ParseMapFile() method
+#
+# Parse map files to get a dict of 'ModuleName' : {FunName : FunAddress}
+#
+# @param Files: A list of map files
+#
+# @return AllMaps: An object of all map files
+#
+def ParseMapFile(Files):
+ AllMaps = {}
+ CurrentModule = ''
+ CurrentMaps = {}
+ for File in Files:
+ Content = open(File, 'r').readlines()
+ for Line in Content:
+ Line = CleanString(Line)
+ # skip empty line
+ if Line == '':
+ continue
+
+ if Line.find('(') > -1 and Line.find(')') > -1:
+ if CurrentModule != '' and CurrentMaps != {}:
+ AllMaps[CurrentModule] = CurrentMaps
+ CurrentModule = Line[:Line.find('(')]
+ CurrentMaps = {}
+ continue
+ else:
+ Name = ''
+ Address = ''
+ List = Line.split()
+ Address = List[0]
+ if List[1] == 'F' or List[1] == 'FS':
+ Name = List[2]
+ else:
+ Name = List[1]
+ CurrentMaps[Name] = Address
+ continue
+
+ return AllMaps
+
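+# Illustrative result (not part of the original tool): for a map file line
+# 'PeiCore (...)' followed by '0x1000 F _ModuleEntryPoint', ParseMapFile
+# returns {'PeiCore': {'_ModuleEntryPoint': '0x1000'}} once the next module
+# header flushes the current map.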
+## ConvertGuid() method
+#
+# Convert a GUID CName to its upper-case, underscore-separated form
+#
+# @param guid: The GUID to be converted
+#
+# @return newGuid: The GUID name with all upper letters
+#
+def ConvertGuid(guid):
+ numList = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
+ newGuid = ''
+ if guid.startswith('g'):
+ guid = guid[1:]
+ for i in guid:
+ if i.upper() == i and i not in numList:
+ newGuid = newGuid + ('_' + i)
+ else:
+ newGuid = newGuid + i.upper()
+ if newGuid.startswith('_'):
+ newGuid = newGuid[1:]
+ if newGuid.endswith('_'):
+ newGuid = newGuid[:-1]
+
+ return newGuid
+
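+# Illustrative call (not part of the original tool):
+#
+#   ConvertGuid('gEfiDxeIplPpiGuid')   # -> 'EFI_DXE_IPL_PPI_GUID'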
+## ConvertGuid2() method
+#
+# Convert a GUID to a GUID with new string instead of old string
+#
+# @param guid: The GUID to be converted
+# @param old: Old string to be replaced
+# @param new: New string to replace the old one
+#
+# @return newGuid: The GUID after replacement
+#
+def ConvertGuid2(guid, old, new):
+ newGuid = ConvertGuid(guid)
+ newGuid = newGuid.replace(old, new)
+
+ return newGuid
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/ParserWarning.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/ParserWarning.py
new file mode 100755
index 00000000..32645814
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/ParserWarning.py
@@ -0,0 +1,20 @@
+## @file
+# Warning information of Eot
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+class Warning (Exception):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param File The FDF name
+ # @param Line The line number where the error occurs
+ #
+ def __init__(self, Str, File = None, Line = None):
+ self.message = Str
+ self.FileName = File
+ self.LineNumber = Line
+ self.ToolName = 'EOT'
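+
+# Illustrative usage (not part of the original tool):
+#
+#   try:
+#       raise Warning('Error when opening file', File='Sample.fdf', Line=10)
+#   except Warning as Wrn:
+#       print(Wrn.ToolName, Wrn.message)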
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Report.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Report.py
new file mode 100755
index 00000000..1d85c715
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/Report.py
@@ -0,0 +1,468 @@
+## @file
+# This file is used to create report for Eot tool
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+from . import EotGlobalData
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+## Report() class
+#
+# This class defines the Report
+#
+# @param object: Inherited from object class
+#
+class Report(object):
+ ## The constructor
+ #
+ # @param self: The object pointer
+ # @param ReportName: name of the report
+ # @param FvObj: FV object after parsing FV images
+ # @param DispatchName: name of the optional dispatch list file
+ #
+ def __init__(self, ReportName = 'Report.html', FvObj = None, DispatchName=None):
+ self.ReportName = ReportName
+ self.Op = open(ReportName, 'w+')
+ self.DispatchList = None
+ if DispatchName:
+ self.DispatchList = open(DispatchName, 'w+')
+ self.FvObj = FvObj
+ self.FfsIndex = 0
+ self.PpiIndex = 0
+ self.ProtocolIndex = 0
+ if EotGlobalData.gMACRO['EFI_SOURCE'] == '':
+ EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gMACRO['EDK_SOURCE']
+
+ ## WriteLn() method
+ #
+ # Write a line in the report
+ #
+ # @param self: The object pointer
+ # @param Line: The line to be written
+ #
+ def WriteLn(self, Line):
+ self.Op.write('%s\n' % Line)
+
+ ## GenerateReport() method
+ #
+ # Top-level driver that generates the whole report
+ #
+ # @param self: The object pointer
+ #
+ def GenerateReport(self):
+ self.GenerateHeader()
+ self.GenerateFv()
+ self.GenerateTail()
+ self.Op.close()
+ self.GenerateUnDispatchedList()
+
+ ## GenerateUnDispatchedList() method
+ #
+ # Create a list for not dispatched items
+ #
+ # @param self: The object pointer
+ #
+ def GenerateUnDispatchedList(self):
+ FvObj = self.FvObj
+ EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.Name)
+ for Item in FvObj.UnDispatchedFfsDict.keys():
+ EotGlobalData.gOP_UN_DISPATCHED.write('%s\n' % FvObj.UnDispatchedFfsDict[Item])
+
+ ## GenerateFv() method
+ #
+ # Generate FV information
+ #
+ # @param self: The object pointer
+ #
+ def GenerateFv(self):
+ FvObj = self.FvObj
+ Content = """ <tr>
+ <td width="20%%"><strong>Name</strong></td>
+ <td width="60%%"><strong>Guid</strong></td>
+ <td width="20%%"><strong>Size</strong></td>
+ </tr>"""
+ self.WriteLn(Content)
+
+ for Info in FvObj.BasicInfo:
+ FvName = Info[0]
+ FvGuid = Info[1]
+ FvSize = Info[2]
+
+ Content = """ <tr>
+ <td>%s</td>
+ <td>%s</td>
+ <td>%s</td>
+ </tr>""" % (FvName, FvGuid, FvSize)
+ self.WriteLn(Content)
+
+ Content = """ <td colspan="3"><table width="100%%" border="1">
+ <tr>"""
+ self.WriteLn(Content)
+
+ EotGlobalData.gOP_DISPATCH_ORDER.write('Dispatched:\n')
+ for FfsId in FvObj.OrderedFfsDict.keys():
+ self.GenerateFfs(FvObj.OrderedFfsDict[FfsId])
+ Content = """ </table></td>
+ </tr>"""
+ self.WriteLn(Content)
+
+ # For UnDispatched
+ Content = """ <td colspan="3"><table width="100%%" border="1">
+ <tr>
+ <tr><strong>UnDispatched</strong></tr>"""
+ self.WriteLn(Content)
+
+ EotGlobalData.gOP_DISPATCH_ORDER.write('\nUnDispatched:\n')
+ for FfsId in FvObj.UnDispatchedFfsDict.keys():
+ self.GenerateFfs(FvObj.UnDispatchedFfsDict[FfsId])
+ Content = """ </table></td>
+ </tr>"""
+ self.WriteLn(Content)
+
+ ## GenerateDepex() method
+ #
+ # Generate Depex information
+ #
+ # @param self: The object pointer
+ # @param DepexString: A DEPEX string needed to be parsed
+ #
+ def GenerateDepex(self, DepexString):
+ NonGuidList = ['AND', 'OR', 'NOT', 'BEFORE', 'AFTER', 'TRUE', 'FALSE']
+ ItemList = DepexString.split(' ')
+ DepexString = ''
+ for Item in ItemList:
+ if Item not in NonGuidList:
+ SqlCommand = """select DISTINCT GuidName from Report where GuidValue like '%s' and ItemMode = 'Produced' group by GuidName""" % (Item)
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ if RecordSet != []:
+ Item = RecordSet[0][0]
+ DepexString = DepexString + Item + ' '
+ Content = """ <tr>
+ <td width="5%%"></td>
+ <td width="95%%">%s</td>
+ </tr>""" % (DepexString)
+ self.WriteLn(Content)
+
+ ## GeneratePpi() method
+ #
+ # Generate PPI information
+ #
+ # @param self: The object pointer
+ # @param Name: CName of a GUID
+ # @param Guid: Value of a GUID
+ # @param Type: Type of a GUID
+ #
+ def GeneratePpi(self, Name, Guid, Type):
+ self.GeneratePpiProtocol('Ppi', Name, Guid, Type, self.PpiIndex)
+
+ ## GenerateProtocol() method
+ #
+ # Generate PROTOCOL information
+ #
+ # @param self: The object pointer
+ # @param Name: CName of a GUID
+ # @param Guid: Value of a GUID
+ # @param Type: Type of a GUID
+ #
+ def GenerateProtocol(self, Name, Guid, Type):
+ self.GeneratePpiProtocol('Protocol', Name, Guid, Type, self.ProtocolIndex)
+
+ ## GeneratePpiProtocol() method
+ #
+ # Generate PPI/PROTOCOL information
+ #
+ # @param self: The object pointer
+ # @param Model: Model of a GUID, PPI or PROTOCOL
+ # @param Name: Name of a GUID
+ # @param Guid: Value of a GUID
+ # @param Type: Type of a GUID
+ # @param CName: CName(Index) of a GUID
+ #
+ def GeneratePpiProtocol(self, Model, Name, Guid, Type, CName):
+ Content = """ <tr>
+ <td width="5%%"></td>
+ <td width="10%%">%s</td>
+ <td width="85%%" colspan="3">%s</td>
+ <!-- %s -->
+ </tr>""" % (Model, Name, Guid)
+ self.WriteLn(Content)
+ if Type == 'Produced':
+ SqlCommand = """select DISTINCT SourceFileFullPath, BelongsToFunction from Report where GuidName like '%s' and ItemMode = 'Callback'""" % Name
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ for Record in RecordSet:
+ SqlCommand = """select FullPath from File
+ where ID = (
+ select DISTINCT BelongsToFile from Inf
+ where Value1 like '%s')""" % Record[0]
+ ModuleSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ Inf = ModuleSet[0][0].replace(EotGlobalData.gMACRO['WORKSPACE'], '.')
+ Function = Record[1]
+ Address = ''
+ for Item in EotGlobalData.gMap:
+ if Function in EotGlobalData.gMap[Item]:
+ Address = EotGlobalData.gMap[Item][Function]
+ break
+ if '_' + Function in EotGlobalData.gMap[Item]:
+ Address = EotGlobalData.gMap[Item]['_' + Function]
+ break
+ Content = """ <tr>
+ <td width="5%%"></td>
+ <td width="10%%">%s</td>
+ <td width="40%%">%s</td>
+ <td width="35%%">%s</td>
+ <td width="10%%">%s</td>
+ </tr>""" % ('Callback', Inf, Function, Address)
+ self.WriteLn(Content)
+
+ ## GenerateFfs() method
+ #
+ # Generate FFS information
+ #
+ # @param self: The object pointer
+ # @param FfsObj: FFS object after FV image is parsed
+ #
+ def GenerateFfs(self, FfsObj):
+ self.FfsIndex = self.FfsIndex + 1
+ if FfsObj is not None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
+ FfsGuid = FfsObj.Guid
+ FfsOffset = FfsObj._OFF_
+ FfsName = 'Unknown-Module'
+ FfsPath = FfsGuid
+ FfsType = FfsObj._TypeName[FfsObj.Type]
+
+ # Hard code for Binary INF
+ if FfsGuid.upper() == '7BB28B99-61BB-11D5-9A5D-0090273FC14D':
+ FfsName = 'Logo'
+
+ if FfsGuid.upper() == '7E374E25-8E01-4FEE-87F2-390C23C606CD':
+ FfsName = 'AcpiTables'
+
+ if FfsGuid.upper() == '961578FE-B6B7-44C3-AF35-6BC705CD2B1F':
+ FfsName = 'Fat'
+
+ # Find FFS Path and Name
+ SqlCommand = """select Value2 from Inf
+ where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
+ and Model = %s and Value1='BASE_NAME'""" % (FfsGuid, 5001, 5001)
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ if RecordSet != []:
+ FfsName = RecordSet[0][0]
+
+ SqlCommand = """select FullPath from File
+ where ID = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
+ and Model = %s""" % (FfsGuid, 5001, 1011)
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ if RecordSet != []:
+ FfsPath = RecordSet[0][0]
+
+ Content = """ <tr>
+ <tr class='styleFfs' id='FfsHeader%s'>
+ <td width="55%%"><span onclick="Display('FfsHeader%s', 'Ffs%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">%s</span></td>
+ <td width="15%%">%s</td>
+ <!--<td width="20%%">%s</td>-->
+ <!--<td width="20%%">%s</td>-->
+ <td width="10%%">%s</td>
+ </tr>
+ <tr id='Ffs%s' style='display:none;'>
+ <td colspan="4"><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, FfsPath, FfsName, FfsGuid, FfsOffset, FfsType, self.FfsIndex)
+
+ if self.DispatchList:
+ if FfsObj.Type in [0x04, 0x06]:
+ self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "P", FfsName, FfsPath))
+ if FfsObj.Type in [0x05, 0x07, 0x08, 0x0A]:
+ self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "D", FfsName, FfsPath))
+
+ self.WriteLn(Content)
+
+ EotGlobalData.gOP_DISPATCH_ORDER.write('%s\n' %FfsName)
+
+ if FfsObj.Depex != '':
+ Content = """ <tr>
+ <td><span id='DepexHeader%s' class="styleDepex" onclick="Display('DepexHeader%s', 'Depex%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">&nbsp&nbspDEPEX expression</span></td>
+ </tr>
+ <tr id='Depex%s' style='display:none;'>
+ <td><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, self.FfsIndex)
+ self.WriteLn(Content)
+ self.GenerateDepex(FfsObj.Depex)
+ Content = """ </table></td>
+ </tr>"""
+ self.WriteLn(Content)
+ # End of DEPEX
+
+ # Find Consumed Ppi/Protocol
+ SqlCommand = """select ModuleName, ItemType, GuidName, GuidValue, GuidMacro from Report
+ where SourceFileFullPath in
+ (select Value1 from Inf where BelongsToFile =
+ (select BelongsToFile from Inf
+ where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
+ and Model = %s)
+ and ItemMode = 'Consumed' group by GuidName order by ItemType""" \
+ % (FfsGuid, 5001, 3007)
+
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ if RecordSet != []:
+ Count = len(RecordSet)
+ Content = """ <tr>
+ <td><span id='ConsumedHeader%s' class="styleConsumed" onclick="Display('ConsumedHeader%s', 'Consumed%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">&nbsp&nbspConsumed Ppis/Protocols List (%s)</span></td>
+ </tr>
+ <tr id='Consumed%s' style='display:none;'>
+ <td><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, Count, self.FfsIndex)
+ self.WriteLn(Content)
+ self.ProtocolIndex = 0
+ for Record in RecordSet:
+ self.ProtocolIndex = self.ProtocolIndex + 1
+ Name = Record[2]
+ CName = Record[4]
+ Guid = Record[3]
+ Type = Record[1]
+ self.GeneratePpiProtocol(Type, Name, Guid, 'Consumed', CName)
+
+ Content = """ </table></td>
+ </tr>"""
+ self.WriteLn(Content)
+ #End of Consumed Ppi/Protocol
+
+ # Find Produced Ppi/Protocol
+ SqlCommand = """select ModuleName, ItemType, GuidName, GuidValue, GuidMacro from Report
+ where SourceFileFullPath in
+ (select Value1 from Inf where BelongsToFile =
+ (select BelongsToFile from Inf
+ where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
+ and Model = %s)
+ and ItemMode = 'Produced' group by GuidName order by ItemType""" \
+ % (FfsGuid, 5001, 3007)
+
+ RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
+ if RecordSet != []:
+ Count = len(RecordSet)
+ Content = """ <tr>
+ <td><span id='ProducedHeader%s' class="styleProduced" onclick="Display('ProducedHeader%s', 'Produced%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">&nbsp&nbspProduced Ppis/Protocols List (%s)</span></td>
+ </tr>
+ <tr id='Produced%s' style='display:none;'>
+ <td><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, Count, self.FfsIndex)
+ self.WriteLn(Content)
+ self.PpiIndex = 0
+ for Record in RecordSet:
+ self.PpiIndex = self.PpiIndex + 1
+ Name = Record[2]
+ CName = Record[4]
+ Guid = Record[3]
+ Type = Record[1]
+ self.GeneratePpiProtocol(Type, Name, Guid, 'Produced', CName)
+
+ Content = """ </table></td>
+ </tr>"""
+ self.WriteLn(Content)
+ RecordSet = None
+ # End of Produced Ppi/Protocol
+
+ Content = """ </table></td>
+ </tr>"""
+ self.WriteLn(Content)
+
+ ## GenerateTail() method
+ #
+ # Generate end tags of HTML report
+ #
+ # @param self: The object pointer
+ #
+ def GenerateTail(self):
+ Tail = """</table>
+</body>
+</html>"""
+ self.WriteLn(Tail)
+
+ ## GenerateHeader() method
+ #
+ # Generate start tags of HTML report
+ #
+ # @param self: The object pointer
+ #
+ def GenerateHeader(self):
+ Header = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+"http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<title>Execution Order Tool Report</title>
+<meta http-equiv="Content-Type" content="text/html">
+<style type="text/css">
+<!--
+.styleFfs {
+ color: #006600;
+ font-weight: bold;
+}
+.styleDepex {
+ color: #FF0066;
+ font-weight: bold;
+}
+.styleProduced {
+ color: #0000FF;
+ font-weight: bold;
+}
+.styleConsumed {
+ color: #FF00FF;
+ font-weight: bold;
+}
+-->
+</style>
+<Script type="text/javascript">
+function Display(ParentID, SubID)
+{
+ SubItem = document.getElementById(SubID);
+ ParentItem = document.getElementById(ParentID);
+ if (SubItem.style.display == 'none')
+ {
+ SubItem.style.display = ''
+ ParentItem.style.fontWeight = 'normal'
+ }
+ else
+ {
+ SubItem.style.display = 'none'
+ ParentItem.style.fontWeight = 'bold'
+ }
+
+}
+
+function funOnMouseOver()
+{
+ document.body.style.cursor = "hand";
+}
+
+function funOnMouseOut()
+{
+ document.body.style.cursor = "";
+}
+
+</Script>
+</head>
+
+<body>
+<table width="100%%" border="1">"""
+ self.WriteLn(Header)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ # Initialize log system
+ # NOTE: this self-test assumes 'array' and 'FirmwareVolume' are available;
+ # 'from array import array' covers the former, while FirmwareVolume is
+ # expected to come from the Eot package (its module is not part of this file).
+ from array import array
+ FilePath = 'FVRECOVERYFLOPPY.fv'
+ if FilePath.lower().endswith(".fv"):
+ fd = open(FilePath, 'rb')
+ buf = array('B')
+ try:
+ buf.fromfile(fd, os.path.getsize(FilePath))
+ except EOFError:
+ pass
+
+ fv = FirmwareVolume("FVRECOVERY", buf, 0)
+
+ report = Report('Report.html', fv)
+ report.GenerateReport()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/__init__.py
new file mode 100644
index 00000000..04923660
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Eot' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/c.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/c.py
new file mode 100755
index 00000000..8aa45d72
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Eot/c.py
@@ -0,0 +1,383 @@
+## @file
+# Preprocess source file
+#
+# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import sys
+import Common.LongFilePathOs as os
+import re
+from . import CodeFragmentCollector
+from . import FileProfile
+from CommonDataClass import DataClass
+from Common import EdkLogger
+from .EotToolError import *
+from . import EotGlobalData
+
+# Global Dicts
+IncludeFileListDict = {}
+IncludePathListDict = {}
+ComplexTypeDict = {}
+SUDict = {}
+
+## GetFuncDeclPattern() method
+#
+# Get the pattern of function declaration
+#
+# @return p: the pattern of function declaration
+#
+def GetFuncDeclPattern():
+ p = re.compile(r'(EFIAPI|EFI_BOOT_SERVICE|EFI_RUNTIME_SERVICE)?\s*[_\w]+\s*\(.*\).*', re.DOTALL)
+ return p
+
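+# Illustrative match (not part of the original tool): the pattern accepts
+# declarations such as 'EFIAPI PeiMain (IN CONST EFI_SEC_PEI_HAND_OFF *Data)'
+# and rejects plain variable declarations without a parameter list.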
+## GetArrayPattern() method
+#
+# Get the pattern of array
+#
+# @return p: the pattern of array
+#
+def GetArrayPattern():
+ p = re.compile(r'[_\w]*\s*[\[.*\]]+')
+ return p
+
+## GetTypedefFuncPointerPattern() method
+#
+# Get the pattern of function pointer
+#
+# @return p: the pattern of function pointer
+#
+def GetTypedefFuncPointerPattern():
+ p = re.compile(r'[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
+ return p
+
+## GetDB() method
+#
+# Get global database instance
+#
+# @return EotGlobalData.gDb: the global database instance
+#
+def GetDB():
+ return EotGlobalData.gDb
+
+## PrintErrorMsg() method
+#
+# Print an error message
+#
+# @param ErrorType: Type of error
+# @param Msg: Error message
+# @param TableName: table name of error found
+# @param ItemId: id of item
+#
+def PrintErrorMsg(ErrorType, Msg, TableName, ItemId):
+ Msg = Msg.replace('\n', '').replace('\r', '')
+ MsgPartList = Msg.split()
+ Msg = ''
+ for Part in MsgPartList:
+ Msg += Part
+ Msg += ' '
+ GetDB().TblReport.Insert(ErrorType, OtherMsg = Msg, BelongsToTable = TableName, BelongsToItem = ItemId)
+
+## GetIdType() method
+#
+# Find type of input string
+#
+# @param Str: String to be parsed
+#
+# @return Type: The type of the string
+#
+def GetIdType(Str):
+ Type = DataClass.MODEL_UNKNOWN
+ Str = Str.replace('#', '# ')
+ List = Str.split()
+ if List[1] == 'include':
+ Type = DataClass.MODEL_IDENTIFIER_INCLUDE
+ elif List[1] == 'define':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_DEFINE
+ elif List[1] == 'ifdef':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_IFDEF
+ elif List[1] == 'ifndef':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF
+ elif List[1] == 'endif':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_ENDIF
+ elif List[1] == 'pragma':
+ Type = DataClass.MODEL_IDENTIFIER_MACRO_PROGMA
+ else:
+ Type = DataClass.MODEL_UNKNOWN
+ return Type
+
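+# Illustrative call (not part of the original tool):
+#
+#   GetIdType('#include "PiPei.h"')   # -> DataClass.MODEL_IDENTIFIER_INCLUDE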
+## GetIdentifierList() method
+#
+# Collect all identifiers found in the parsed file profile
+#
+# @return IdList: The list of all identifier objects
+#
+def GetIdentifierList():
+ IdList = []
+
+ for pp in FileProfile.PPDirectiveList:
+ Type = GetIdType(pp.Content)
+ IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0], pp.StartPos[1], pp.EndPos[0], pp.EndPos[1])
+ IdList.append(IdPP)
+
+ for ae in FileProfile.AssignmentExpressionList:
+ IdAE = DataClass.IdentifierClass(-1, ae.Operator, '', ae.Name, ae.Value, DataClass.MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION, -1, -1, ae.StartPos[0], ae.StartPos[1], ae.EndPos[0], ae.EndPos[1])
+ IdList.append(IdAE)
+
+ FuncDeclPattern = GetFuncDeclPattern()
+ ArrayPattern = GetArrayPattern()
+ for var in FileProfile.VariableDeclarationList:
+ DeclText = var.Declarator.strip()
+ while DeclText.startswith('*'):
+ var.Modifier += '*'
+ DeclText = DeclText.lstrip('*').strip()
+ var.Declarator = DeclText
+ if FuncDeclPattern.match(var.Declarator):
+ DeclSplitList = var.Declarator.split('(')
+ FuncName = DeclSplitList[0]
+ FuncNamePartList = FuncName.split()
+ if len(FuncNamePartList) > 1:
+ FuncName = FuncNamePartList[-1]
+ Index = 0
+ while Index < len(FuncNamePartList) - 1:
+ var.Modifier += ' ' + FuncNamePartList[Index]
+ var.Declarator = var.Declarator.lstrip().lstrip(FuncNamePartList[Index])
+ Index += 1
+ IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, '', DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
+ IdList.append(IdVar)
+ continue
+
+ if var.Declarator.find('{') == -1:
+ for decl in var.Declarator.split(','):
+ DeclList = decl.split('=')
+ Name = DeclList[0].strip()
+ if ArrayPattern.match(Name):
+ LSBPos = var.Declarator.find('[')
+ var.Modifier += ' ' + Name[LSBPos:]
+ Name = Name[0:LSBPos]
+
+ IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
+ IdList.append(IdVar)
+ else:
+ DeclList = var.Declarator.split('=')
+ Name = DeclList[0].strip()
+ if ArrayPattern.match(Name):
+ LSBPos = var.Declarator.find('[')
+ var.Modifier += ' ' + Name[LSBPos:]
+ Name = Name[0:LSBPos]
+ IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
+ IdList.append(IdVar)
+
+ for enum in FileProfile.EnumerationDefinitionList:
+ LBPos = enum.Content.find('{')
+ RBPos = enum.Content.find('}')
+ Name = enum.Content[4:LBPos].strip()
+ Value = enum.Content[LBPos+1:RBPos]
+ IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0], enum.StartPos[1], enum.EndPos[0], enum.EndPos[1])
+ IdList.append(IdEnum)
+
+ for su in FileProfile.StructUnionDefinitionList:
+ Type = DataClass.MODEL_IDENTIFIER_STRUCTURE
+ SkipLen = 6
+ if su.Content.startswith('union'):
+ Type = DataClass.MODEL_IDENTIFIER_UNION
+ SkipLen = 5
+ LBPos = su.Content.find('{')
+ RBPos = su.Content.find('}')
+ if LBPos == -1 or RBPos == -1:
+ Name = su.Content[SkipLen:].strip()
+ Value = ''
+ else:
+ Name = su.Content[SkipLen:LBPos].strip()
+ Value = su.Content[LBPos+1:RBPos]
+ IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0], su.StartPos[1], su.EndPos[0], su.EndPos[1])
+ IdList.append(IdPE)
+
+ TdFuncPointerPattern = GetTypedefFuncPointerPattern()
+ for td in FileProfile.TypedefDefinitionList:
+ Modifier = ''
+ Name = td.ToType
+ Value = td.FromType
+ if TdFuncPointerPattern.match(td.ToType):
+ Modifier = td.FromType
+ LBPos = td.ToType.find('(')
+ TmpStr = td.ToType[LBPos+1:].strip()
+ StarPos = TmpStr.find('*')
+ if StarPos != -1:
+ Modifier += ' ' + TmpStr[0:StarPos]
+ while TmpStr[StarPos] == '*':
+ Modifier += ' ' + '*'
+ StarPos += 1
+ TmpStr = TmpStr[StarPos:].strip()
+ RBPos = TmpStr.find(')')
+ Name = TmpStr[0:RBPos]
+ Value = 'FP' + TmpStr[RBPos + 1:]
+
+ IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0], td.StartPos[1], td.EndPos[0], td.EndPos[1])
+ IdList.append(IdTd)
+
+ for funcCall in FileProfile.FunctionCallingList:
+ IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0], funcCall.StartPos[1], funcCall.EndPos[0], funcCall.EndPos[1])
+ IdList.append(IdFC)
+ return IdList
+
+## GetParamList() method
+#
+# Get a list of parameters
+#
+# @param FuncDeclarator: Function declarator
+# @param FuncNameLine: Line number of function name
+# @param FuncNameOffset: Offset of function name
+#
+# @return ParamIdList: A list of parameters
+#
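+# For example, a declarator such as "Foo (UINT32 Index, VOID *Buffer OPTIONAL)"
+# yields parameter identifiers for 'Index' and 'Buffer', with the '*' and
+# 'OPTIONAL' tokens folded into each parameter's modifier string.
+#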
+def GetParamList(FuncDeclarator, FuncNameLine = 0, FuncNameOffset = 0):
+ ParamIdList = []
+ DeclSplitList = FuncDeclarator.split('(')
+ if len(DeclSplitList) < 2:
+ return ParamIdList
+ FuncName = DeclSplitList[0]
+ ParamStr = DeclSplitList[1].rstrip(')')
+ LineSkipped = 0
+ OffsetSkipped = 0
+ Start = 0
+ while FuncName.find('\n', Start) != -1:
+ LineSkipped += 1
+ OffsetSkipped = 0
+        # find() returns an absolute index, so assign rather than accumulate
+        Start = FuncName.find('\n', Start) + 1
+ OffsetSkipped += len(FuncName[Start:])
+ OffsetSkipped += 1 #skip '('
+ ParamBeginLine = FuncNameLine + LineSkipped
+ ParamBeginOffset = OffsetSkipped
+ for p in ParamStr.split(','):
+ ListP = p.split()
+ if len(ListP) == 0:
+ continue
+ ParamName = ListP[-1]
+ DeclText = ParamName.strip()
+ RightSpacePos = p.rfind(ParamName)
+ ParamModifier = p[0:RightSpacePos]
+ if ParamName == 'OPTIONAL':
+ if ParamModifier == '':
+ ParamModifier += ' ' + 'OPTIONAL'
+ DeclText = ''
+ else:
+ ParamName = ListP[-2]
+ DeclText = ParamName.strip()
+ RightSpacePos = p.rfind(ParamName)
+ ParamModifier = p[0:RightSpacePos]
+ ParamModifier += 'OPTIONAL'
+ while DeclText.startswith('*'):
+ ParamModifier += ' ' + '*'
+ DeclText = DeclText.lstrip('*').strip()
+ ParamName = DeclText
+
+ Start = 0
+ while p.find('\n', Start) != -1:
+ LineSkipped += 1
+ OffsetSkipped = 0
+            # find() returns an absolute index, so assign rather than accumulate
+            Start = p.find('\n', Start) + 1
+ OffsetSkipped += len(p[Start:])
+
+ ParamEndLine = ParamBeginLine + LineSkipped
+ ParamEndOffset = OffsetSkipped
+ IdParam = DataClass.IdentifierClass(-1, ParamModifier, '', ParamName, '', DataClass.MODEL_IDENTIFIER_PARAMETER, -1, -1, ParamBeginLine, ParamBeginOffset, ParamEndLine, ParamEndOffset)
+ ParamIdList.append(IdParam)
+ ParamBeginLine = ParamEndLine
+ ParamBeginOffset = OffsetSkipped + 1 #skip ','
+
+ return ParamIdList
+
+## GetFunctionList()
+#
+# Get a list of functions
+#
+# @return FuncObjList: A list of function objects
+#
+def GetFunctionList():
+ FuncObjList = []
+ for FuncDef in FileProfile.FunctionDefinitionList:
+ ParamIdList = []
+ DeclText = FuncDef.Declarator.strip()
+ while DeclText.startswith('*'):
+ FuncDef.Modifier += '*'
+ DeclText = DeclText.lstrip('*').strip()
+
+ FuncDef.Declarator = FuncDef.Declarator.lstrip('*')
+ DeclSplitList = FuncDef.Declarator.split('(')
+ if len(DeclSplitList) < 2:
+ continue
+
+ FuncName = DeclSplitList[0]
+ FuncNamePartList = FuncName.split()
+ if len(FuncNamePartList) > 1:
+ FuncName = FuncNamePartList[-1]
+ Index = 0
+ while Index < len(FuncNamePartList) - 1:
+ FuncDef.Modifier += ' ' + FuncNamePartList[Index]
+ Index += 1
+
+ FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0], FuncDef.StartPos[1], FuncDef.EndPos[0], FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, [])
+ FuncObjList.append(FuncObj)
+
+ return FuncObjList
+
+## CreateCCodeDB() method
+#
+# Create database for all c code
+#
+# @param FileNameList: A list of all c code file names
+#
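+# Each .c/.h file is parsed at most once (names are de-duplicated
+# case-insensitively); the functions and identifiers collected from each file
+# are then inserted into the EOT database (EotGlobalData.gDb).
+#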
+def CreateCCodeDB(FileNameList):
+ FileObjList = []
+ ParseErrorFileList = []
+ ParsedFiles = {}
+ for FullName in FileNameList:
+ if os.path.splitext(FullName)[1] in ('.h', '.c'):
+ if FullName.lower() in ParsedFiles:
+ continue
+ ParsedFiles[FullName.lower()] = 1
+ EdkLogger.info("Parsing " + FullName)
+            model = DataClass.MODEL_FILE_C if FullName.endswith('.c') else DataClass.MODEL_FILE_H
+ collector = CodeFragmentCollector.CodeFragmentCollector(FullName)
+ try:
+ collector.ParseFile()
+ except:
+ ParseErrorFileList.append(FullName)
+ BaseName = os.path.basename(FullName)
+ DirName = os.path.dirname(FullName)
+ Ext = os.path.splitext(BaseName)[1].lstrip('.')
+ ModifiedTime = os.path.getmtime(FullName)
+ FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
+ FileObjList.append(FileObj)
+ collector.CleanFileProfileBuffer()
+
+ if len(ParseErrorFileList) > 0:
+ EdkLogger.info("Found unrecoverable error during parsing:\n\t%s\n" % "\n\t".join(ParseErrorFileList))
+
+ Db = EotGlobalData.gDb
+ for file in FileObjList:
+ Db.InsertOneFile(file)
+
+ Db.UpdateIdentifierBelongsToFunction()
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ CollectSourceCodeDataIntoDB(sys.argv[1])
+
+ print('Done!')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GNUmakefile b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GNUmakefile
new file mode 100644
index 00000000..d897bf2a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GNUmakefile
@@ -0,0 +1,12 @@
+## @file
+# GNU/Linux makefile for Python tools build.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+all:
+
+clean:
+ find . -name '*.pyc' -exec rm '{}' ';'
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/AprioriSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/AprioriSection.py
new file mode 100755
index 00000000..84ec0976
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/AprioriSection.py
@@ -0,0 +1,121 @@
+## @file
+# process APRIORI file data and generate PEI/DXE APRIORI file
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from struct import pack
+import Common.LongFilePathOs as os
+from io import BytesIO
+from .FfsFileStatement import FileStatement
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from Common.StringUtils import NormPath
+from Common.Misc import SaveFileOnChange, PathClass
+from Common.EdkLogger import error as EdkLoggerError
+from Common.BuildToolError import RESOURCE_NOT_AVAILABLE
+from Common.DataType import TAB_COMMON
+
+DXE_APRIORI_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
+PEI_APRIORI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
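+
+# Fixed FFS file-name GUIDs that the DXE and PEI cores use to locate their
+# respective APRIORI files.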
+
+## process APRIORI file data and generate PEI/DXE APRIORI file
+#
+#
+class AprioriSection (object):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.DefineVarDict = {}
+ self.FfsList = []
+ self.AprioriType = ""
+
+ ## GenFfs() method
+ #
+ # Generate FFS for APRIORI file
+ #
+ # @param self The object pointer
+    #   @param  FvName      The FV for which the APRIORI file is generated
+    #   @param  Dict        dictionary containing macros and their values
+    #   @param  IsMakefile  True when generation is deferred to the makefile
+ # @retval string Generated file name
+ #
+ def GenFfs (self, FvName, Dict = None, IsMakefile = False):
+ if Dict is None:
+ Dict = {}
+ Buffer = BytesIO()
+ if self.AprioriType == "PEI":
+ AprioriFileGuid = PEI_APRIORI_GUID
+ else:
+ AprioriFileGuid = DXE_APRIORI_GUID
+
+ OutputAprFilePath = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, \
+ GenFdsGlobalVariable.FfsDir,\
+ AprioriFileGuid + FvName)
+ if not os.path.exists(OutputAprFilePath):
+ os.makedirs(OutputAprFilePath)
+
+ OutputAprFileName = os.path.join( OutputAprFilePath, \
+ AprioriFileGuid + FvName + '.Apri' )
+ AprFfsFileName = os.path.join (OutputAprFilePath,\
+ AprioriFileGuid + FvName + '.Ffs')
+
+ Dict.update(self.DefineVarDict)
+ InfFileName = None
+ for FfsObj in self.FfsList:
+ Guid = ""
+ if isinstance(FfsObj, FileStatement):
+ Guid = FfsObj.NameGuid
+ else:
+ InfFileName = NormPath(FfsObj.InfFileName)
+ Arch = FfsObj.GetCurrentArch()
+
+ if Arch:
+ Dict['$(ARCH)'] = Arch
+ InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)
+
+ if Arch:
+ Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ Guid = Inf.Guid
+ else:
+ Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ Guid = Inf.Guid
+
+ if not Inf.Module.Binaries:
+ EdkLoggerError("GenFds", RESOURCE_NOT_AVAILABLE,
+ "INF %s not found in build ARCH %s!" \
+ % (InfFileName, GenFdsGlobalVariable.ArchList))
+
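+            # Pack the registry-format GUID string into the 16-byte binary
+            # EFI_GUID layout: Data1 (UINT32), Data2 (UINT16), Data3 (UINT16),
+            # then the eight Data4 bytes; on a little-endian host this equals
+            # uuid.UUID(Guid).bytes_le.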
+ GuidPart = Guid.split('-')
+ Buffer.write(pack('I', int(GuidPart[0], 16)))
+ Buffer.write(pack('H', int(GuidPart[1], 16)))
+ Buffer.write(pack('H', int(GuidPart[2], 16)))
+
+ for Num in range(2):
+ Char = GuidPart[3][Num*2:Num*2+2]
+ Buffer.write(pack('B', int(Char, 16)))
+
+ for Num in range(6):
+ Char = GuidPart[4][Num*2:Num*2+2]
+ Buffer.write(pack('B', int(Char, 16)))
+
+ SaveFileOnChange(OutputAprFileName, Buffer.getvalue())
+
+ RawSectionFileName = os.path.join( OutputAprFilePath, \
+ AprioriFileGuid + FvName + '.raw' )
+ MakefilePath = None
+ if IsMakefile:
+ if not InfFileName:
+ return None
+ MakefilePath = InfFileName, Arch
+ GenFdsGlobalVariable.GenerateSection(RawSectionFileName, [OutputAprFileName], 'EFI_SECTION_RAW', IsMakefile=IsMakefile)
+ GenFdsGlobalVariable.GenerateFfs(AprFfsFileName, [RawSectionFileName],
+ 'EFI_FV_FILETYPE_FREEFORM', AprioriFileGuid, MakefilePath=MakefilePath)
+
+ return AprFfsFileName
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Capsule.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Capsule.py
new file mode 100755
index 00000000..b155d80e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Capsule.py
@@ -0,0 +1,250 @@
+## @file
+# generate capsule
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from .GenFdsGlobalVariable import GenFdsGlobalVariable, FindExtendTool
+from CommonDataClass.FdfClass import CapsuleClassObject
+import Common.LongFilePathOs as os
+from io import BytesIO
+from Common.Misc import SaveFileOnChange, PackGUID
+import uuid
+from struct import pack
+from Common import EdkLogger
+from Common.BuildToolError import GENFDS_ERROR
+from Common.DataType import TAB_LINE_BREAK
+
+WIN_CERT_REVISION = 0x0200
+WIN_CERT_TYPE_EFI_GUID = 0x0EF1
+EFI_CERT_TYPE_PKCS7_GUID = uuid.UUID('{4aafd29d-68df-49ee-8aa9-347d375665a7}')
+EFI_CERT_TYPE_RSA2048_SHA256_GUID = uuid.UUID('{a7717414-c616-4977-9420-844712a735bf}')
+
+## create an INF file that describes what goes into the capsule, then call GenFv to generate the capsule
+#
+#
+class Capsule (CapsuleClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ CapsuleClassObject.__init__(self)
+ # For GenFv
+ self.BlockSize = None
+ # For GenFv
+ self.BlockNum = None
+ self.CapsuleName = None
+
+ ## Generate FMP capsule
+ #
+ # @retval string Generated Capsule file path
+ #
+ def GenFmpCapsule(self):
+ #
+ # Generate capsule header
+ # typedef struct {
+ # EFI_GUID CapsuleGuid;
+ # UINT32 HeaderSize;
+ # UINT32 Flags;
+ # UINT32 CapsuleImageSize;
+ # } EFI_CAPSULE_HEADER;
+ #
+ Header = BytesIO()
+ #
+ # Use FMP capsule GUID: 6DCBD5ED-E82D-4C44-BDA1-7194199AD92A
+ #
+ Header.write(PackGUID('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A'.split('-')))
+ HdrSize = 0
+ if 'CAPSULE_HEADER_SIZE' in self.TokensDict:
+ Header.write(pack('=I', int(self.TokensDict['CAPSULE_HEADER_SIZE'], 16)))
+ HdrSize = int(self.TokensDict['CAPSULE_HEADER_SIZE'], 16)
+ else:
+ Header.write(pack('=I', 0x20))
+ HdrSize = 0x20
+ Flags = 0
+ if 'CAPSULE_FLAGS' in self.TokensDict:
+ for flag in self.TokensDict['CAPSULE_FLAGS'].split(','):
+ flag = flag.strip()
+ if flag == 'PopulateSystemTable':
+ Flags |= 0x00010000 | 0x00020000
+ elif flag == 'PersistAcrossReset':
+ Flags |= 0x00010000
+ elif flag == 'InitiateReset':
+ Flags |= 0x00040000
+ Header.write(pack('=I', Flags))
+ #
+ # typedef struct {
+ # UINT32 Version;
+ # UINT16 EmbeddedDriverCount;
+ # UINT16 PayloadItemCount;
+ # // UINT64 ItemOffsetList[];
+ # } EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER;
+ #
+ FwMgrHdr = BytesIO()
+ if 'CAPSULE_HEADER_INIT_VERSION' in self.TokensDict:
+ FwMgrHdr.write(pack('=I', int(self.TokensDict['CAPSULE_HEADER_INIT_VERSION'], 16)))
+ else:
+ FwMgrHdr.write(pack('=I', 0x00000001))
+ FwMgrHdr.write(pack('=HH', len(self.CapsuleDataList), len(self.FmpPayloadList)))
+ FwMgrHdrSize = 4+2+2+8*(len(self.CapsuleDataList)+len(self.FmpPayloadList))
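+        # 4-byte Version + 2-byte EmbeddedDriverCount + 2-byte PayloadItemCount,
+        # plus one UINT64 ItemOffsetList entry per embedded driver and payload.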
+
+ #
+ # typedef struct _WIN_CERTIFICATE {
+ # UINT32 dwLength;
+ # UINT16 wRevision;
+ # UINT16 wCertificateType;
+ # //UINT8 bCertificate[ANYSIZE_ARRAY];
+ # } WIN_CERTIFICATE;
+ #
+ # typedef struct _WIN_CERTIFICATE_UEFI_GUID {
+ # WIN_CERTIFICATE Hdr;
+ # EFI_GUID CertType;
+ # //UINT8 CertData[ANYSIZE_ARRAY];
+ # } WIN_CERTIFICATE_UEFI_GUID;
+ #
+ # typedef struct {
+ # UINT64 MonotonicCount;
+ # WIN_CERTIFICATE_UEFI_GUID AuthInfo;
+ # } EFI_FIRMWARE_IMAGE_AUTHENTICATION;
+ #
+ # typedef struct _EFI_CERT_BLOCK_RSA_2048_SHA256 {
+ # EFI_GUID HashType;
+ # UINT8 PublicKey[256];
+ # UINT8 Signature[256];
+ # } EFI_CERT_BLOCK_RSA_2048_SHA256;
+ #
+
+ PreSize = FwMgrHdrSize
+ Content = BytesIO()
+ for driver in self.CapsuleDataList:
+ FileName = driver.GenCapsuleSubItem()
+ FwMgrHdr.write(pack('=Q', PreSize))
+ PreSize += os.path.getsize(FileName)
+ File = open(FileName, 'rb')
+ Content.write(File.read())
+ File.close()
+ for fmp in self.FmpPayloadList:
+ if fmp.Existed:
+ FwMgrHdr.write(pack('=Q', PreSize))
+ PreSize += len(fmp.Buffer)
+ Content.write(fmp.Buffer)
+ continue
+ if fmp.ImageFile:
+ for Obj in fmp.ImageFile:
+ fmp.ImageFile = Obj.GenCapsuleSubItem()
+ if fmp.VendorCodeFile:
+ for Obj in fmp.VendorCodeFile:
+ fmp.VendorCodeFile = Obj.GenCapsuleSubItem()
+ if fmp.Certificate_Guid:
+ ExternalTool, ExternalOption = FindExtendTool([], GenFdsGlobalVariable.ArchList, fmp.Certificate_Guid)
+ CmdOption = ''
+ CapInputFile = fmp.ImageFile
+ if not os.path.isabs(fmp.ImageFile):
+ CapInputFile = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, fmp.ImageFile)
+ CapOutputTmp = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.tmp'
+ if ExternalTool is None:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % fmp.Certificate_Guid)
+ else:
+ CmdOption += ExternalTool
+ if ExternalOption:
+ CmdOption = CmdOption + ' ' + ExternalOption
+ CmdOption += ' -e ' + ' --monotonic-count ' + str(fmp.MonotonicCount) + ' -o ' + CapOutputTmp + ' ' + CapInputFile
+ CmdList = CmdOption.split()
+ GenFdsGlobalVariable.CallExternalTool(CmdList, "Failed to generate FMP auth capsule")
+ if uuid.UUID(fmp.Certificate_Guid) == EFI_CERT_TYPE_PKCS7_GUID:
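+                        # PKCS7 certificates are variable-length: the cert data
+                        # size is the growth of the signed image over the input,
+                        # plus the 24-byte WIN_CERTIFICATE_UEFI_GUID header
+                        # (dwLength + wRevision + wCertificateType + CertType GUID)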
+ dwLength = 4 + 2 + 2 + 16 + os.path.getsize(CapOutputTmp) - os.path.getsize(CapInputFile)
+ else:
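+                        # RSA2048-SHA256 uses a fixed-size cert block: the 24-byte
+                        # WIN_CERTIFICATE_UEFI_GUID header plus EFI_CERT_BLOCK_RSA_2048_SHA256
+                        # (16-byte HashType GUID + 256-byte PublicKey + 256-byte Signature)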
+ dwLength = 4 + 2 + 2 + 16 + 16 + 256 + 256
+ fmp.ImageFile = CapOutputTmp
+ AuthData = [fmp.MonotonicCount, dwLength, WIN_CERT_REVISION, WIN_CERT_TYPE_EFI_GUID, fmp.Certificate_Guid]
+ fmp.Buffer = fmp.GenCapsuleSubItem(AuthData)
+ else:
+ fmp.Buffer = fmp.GenCapsuleSubItem()
+ FwMgrHdr.write(pack('=Q', PreSize))
+ PreSize += len(fmp.Buffer)
+ Content.write(fmp.Buffer)
+ BodySize = len(FwMgrHdr.getvalue()) + len(Content.getvalue())
+ Header.write(pack('=I', HdrSize + BodySize))
+ #
+ # The real capsule header structure is 28 bytes
+ #
+ Header.write(b'\x00'*(HdrSize-28))
+ Header.write(FwMgrHdr.getvalue())
+ Header.write(Content.getvalue())
+ #
+ # Generate FMP capsule file
+ #
+ CapOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.Cap'
+ SaveFileOnChange(CapOutputFile, Header.getvalue(), True)
+ return CapOutputFile
+
+ ## Generate capsule
+ #
+ # @param self The object pointer
+ # @retval string Generated Capsule file path
+ #
+ def GenCapsule(self):
+ if self.UiCapsuleName.upper() + 'cap' in GenFdsGlobalVariable.ImageBinDict:
+ return GenFdsGlobalVariable.ImageBinDict[self.UiCapsuleName.upper() + 'cap']
+
+ GenFdsGlobalVariable.InfLogger( "\nGenerate %s Capsule" %self.UiCapsuleName)
+ if ('CAPSULE_GUID' in self.TokensDict and
+ uuid.UUID(self.TokensDict['CAPSULE_GUID']) == uuid.UUID('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')):
+ return self.GenFmpCapsule()
+
+ CapInfFile = self.GenCapInf()
+ CapInfFile.append("[files]" + TAB_LINE_BREAK)
+ CapFileList = []
+ for CapsuleDataObj in self.CapsuleDataList:
+ CapsuleDataObj.CapsuleName = self.CapsuleName
+ FileName = CapsuleDataObj.GenCapsuleSubItem()
+ CapsuleDataObj.CapsuleName = None
+ CapFileList.append(FileName)
+ CapInfFile.append("EFI_FILE_NAME = " + \
+ FileName + \
+ TAB_LINE_BREAK)
+ SaveFileOnChange(self.CapInfFileName, ''.join(CapInfFile), False)
+ #
+ # Call GenFv tool to generate capsule
+ #
+ CapOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName)
+ CapOutputFile = CapOutputFile + '.Cap'
+ GenFdsGlobalVariable.GenerateFirmwareVolume(
+ CapOutputFile,
+ [self.CapInfFileName],
+ Capsule=True,
+ FfsList=CapFileList
+ )
+
+ GenFdsGlobalVariable.VerboseLogger( "\nGenerate %s Capsule Successfully" %self.UiCapsuleName)
+ GenFdsGlobalVariable.SharpCounter = 0
+ GenFdsGlobalVariable.ImageBinDict[self.UiCapsuleName.upper() + 'cap'] = CapOutputFile
+ return CapOutputFile
+
+ ## Generate inf file for capsule
+ #
+ # @param self The object pointer
+ # @retval file inf file object
+ #
+ def GenCapInf(self):
+ self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
+ self.UiCapsuleName + "_Cap" + '.inf')
+ CapInfFile = []
+
+ CapInfFile.append("[options]" + TAB_LINE_BREAK)
+
+ for Item in self.TokensDict:
+ CapInfFile.append("EFI_" + \
+ Item + \
+ ' = ' + \
+ self.TokensDict[Item] + \
+ TAB_LINE_BREAK)
+
+ return CapInfFile
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CapsuleData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CapsuleData.py
new file mode 100755
index 00000000..c7b3b0ca
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CapsuleData.py
@@ -0,0 +1,239 @@
+## @file
+# generate capsule
+#
+# Copyright (c) 2007-2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from io import BytesIO
+from struct import pack
+import os
+from Common.Misc import SaveFileOnChange
+import uuid
+
+## base class for capsule data
+#
+#
+class CapsuleData:
+ ## The constructor
+ #
+ # @param self The object pointer
+ def __init__(self):
+ pass
+
+ ## generate capsule data
+ #
+ # @param self The object pointer
+ def GenCapsuleSubItem(self):
+ pass
+
+## FFS class for capsule data
+#
+#
+class CapsuleFfs (CapsuleData):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self) :
+ self.Ffs = None
+ self.FvName = None
+
+ ## generate FFS capsule data
+ #
+ # @param self The object pointer
+ # @retval string Generated file name
+ #
+ def GenCapsuleSubItem(self):
+ FfsFile = self.Ffs.GenFfs()
+ return FfsFile
+
+## FV class for capsule data
+#
+#
+class CapsuleFv (CapsuleData):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self) :
+ self.Ffs = None
+ self.FvName = None
+ self.CapsuleName = None
+
+ ## generate FV capsule data
+ #
+ # @param self The object pointer
+ # @retval string Generated file name
+ #
+ def GenCapsuleSubItem(self):
+ if self.FvName.find('.fv') == -1:
+ if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
+ FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[self.FvName.upper()]
+ FdBuffer = BytesIO()
+ FvObj.CapsuleName = self.CapsuleName
+ FvFile = FvObj.AddToBuffer(FdBuffer)
+ FvObj.CapsuleName = None
+ FdBuffer.close()
+ return FvFile
+ else:
+ FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName)
+ return FvFile
+
+## FD class for capsule data
+#
+#
+class CapsuleFd (CapsuleData):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self) :
+ self.Ffs = None
+ self.FdName = None
+ self.CapsuleName = None
+
+ ## generate FD capsule data
+ #
+ # @param self The object pointer
+ # @retval string Generated file name
+ #
+ def GenCapsuleSubItem(self):
+ if self.FdName.find('.fd') == -1:
+ if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
+ FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[self.FdName.upper()]
+ FdFile = FdObj.GenFd()
+ return FdFile
+ else:
+ FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName)
+ return FdFile
+
+## AnyFile class for capsule data
+#
+#
+class CapsuleAnyFile (CapsuleData):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self) :
+ self.Ffs = None
+ self.FileName = None
+
+ ## generate AnyFile capsule data
+ #
+ # @param self The object pointer
+ # @retval string Generated file name
+ #
+ def GenCapsuleSubItem(self):
+ return self.FileName
+
+## Afile class for capsule data
+#
+#
+class CapsuleAfile (CapsuleData):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self) :
+ self.Ffs = None
+ self.FileName = None
+
+ ## generate Afile capsule data
+ #
+ # @param self The object pointer
+ # @retval string Generated file name
+ #
+ def GenCapsuleSubItem(self):
+ return self.FileName
+
+class CapsulePayload(CapsuleData):
+    '''Generate the payload file; the header is defined below:
+ #pragma pack(1)
+ typedef struct {
+ UINT32 Version;
+ EFI_GUID UpdateImageTypeId;
+ UINT8 UpdateImageIndex;
+ UINT8 reserved_bytes[3];
+ UINT32 UpdateImageSize;
+ UINT32 UpdateVendorCodeSize;
+ UINT64 UpdateHardwareInstance; //Introduced in v2
+ } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;
+ '''
+ def __init__(self):
+ self.UiName = None
+ self.Version = None
+ self.ImageTypeId = None
+ self.ImageIndex = None
+ self.HardwareInstance = None
+ self.ImageFile = []
+ self.VendorCodeFile = []
+ self.Certificate_Guid = None
+ self.MonotonicCount = None
+ self.Existed = False
+ self.Buffer = None
+
+    def GenCapsuleSubItem(self, AuthData=None):
+        # avoid the shared mutable default-argument pitfall
+        if AuthData is None:
+            AuthData = []
+ if not self.Version:
+ self.Version = '0x00000002'
+ if not self.ImageIndex:
+ self.ImageIndex = '0x1'
+ if not self.HardwareInstance:
+ self.HardwareInstance = '0x0'
+ ImageFileSize = os.path.getsize(self.ImageFile)
+ if AuthData:
+            # ImageFileSize needs to include the full authentication info,
+            # from the first byte of MonotonicCount to the last byte of the
+            # certificate: MonotonicCount (8) + dwLength (4) + wRevision (2)
+            # + wCertificateType (2) + CertType GUID (16) = 32 bytes
+            ImageFileSize += 32
+ VendorFileSize = 0
+ if self.VendorCodeFile:
+ VendorFileSize = os.path.getsize(self.VendorCodeFile)
+
+ #
+ # Fill structure
+ #
+ Guid = self.ImageTypeId.split('-')
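+        # '=ILHH' + 12 'B's + 'IIQ': Version (UINT32), UpdateImageTypeId packed
+        # as an EFI_GUID, UpdateImageIndex, three reserved bytes, the image and
+        # vendor-code sizes (UINT32 each) and UpdateHardwareInstance (UINT64),
+        # using native byte order with standard sizes and no padding.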
+ Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',
+ int(self.Version, 16),
+ int(Guid[0], 16),
+ int(Guid[1], 16),
+ int(Guid[2], 16),
+ int(Guid[3][-4:-2], 16),
+ int(Guid[3][-2:], 16),
+ int(Guid[4][-12:-10], 16),
+ int(Guid[4][-10:-8], 16),
+ int(Guid[4][-8:-6], 16),
+ int(Guid[4][-6:-4], 16),
+ int(Guid[4][-4:-2], 16),
+ int(Guid[4][-2:], 16),
+ int(self.ImageIndex, 16),
+ 0,
+ 0,
+ 0,
+ ImageFileSize,
+ VendorFileSize,
+ int(self.HardwareInstance, 16)
+ )
+ if AuthData:
+ Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])
+ Buffer += uuid.UUID(AuthData[4]).bytes_le
+
+ #
+ # Append file content to the structure
+ #
+ ImageFile = open(self.ImageFile, 'rb')
+ Buffer += ImageFile.read()
+ ImageFile.close()
+ if self.VendorCodeFile:
+ VendorFile = open(self.VendorCodeFile, 'rb')
+ Buffer += VendorFile.read()
+ VendorFile.close()
+ self.Existed = True
+ return Buffer
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CompressSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CompressSection.py
new file mode 100755
index 00000000..8d9e20e9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/CompressSection.py
@@ -0,0 +1,96 @@
+## @file
+# process compress section generation
+#
+# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from .Ffs import SectionSuffix
+from . import Section
+import subprocess
+import Common.LongFilePathOs as os
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import CompressSectionClassObject
+from Common.DataType import *
+
+## generate compress section
+#
+#
+class CompressSection (CompressSectionClassObject) :
+
+ ## compress types: PI standard and non PI standard
+ CompTypeDict = {
+ 'PI_STD' : 'PI_STD',
+ 'PI_NONE' : 'PI_NONE'
+ }
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ CompressSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # Generate compressed section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+    #   @param  Dict        dictionary containing macros and their values
+    #   @retval tuple       (Generated file name list, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = None, IsMakefile = False):
+
+ if FfsInf is not None:
+ self.CompType = FfsInf.__ExtendMacro__(self.CompType)
+ self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
+
+ SectFiles = tuple()
+ SectAlign = []
+ Index = 0
+ MaxAlign = None
+ if Dict is None:
+ Dict = {}
+ for Sect in self.SectionList:
+ Index = Index + 1
+ SecIndex = '%s.%d' %(SecNum, Index)
+ ReturnSectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)
+ if AlignValue is not None:
+ if MaxAlign is None:
+ MaxAlign = AlignValue
+ if GenFdsGlobalVariable.GetAlignment (AlignValue) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
+ MaxAlign = AlignValue
+ if ReturnSectList != []:
+ if AlignValue is None:
+ AlignValue = "1"
+ for FileData in ReturnSectList:
+ SectFiles += (FileData,)
+ SectAlign.append(AlignValue)
+
+ OutputFile = OutputPath + \
+ os.sep + \
+ ModuleName + \
+ SUP_MODULE_SEC + \
+ SecNum + \
+ SectionSuffix['COMPRESS']
+ OutputFile = os.path.normpath(OutputFile)
+ DummyFile = OutputFile + '.dummy'
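+        # Two-step generation: first concatenate the child sections (with their
+        # alignments) into a '.dummy' blob, then wrap that blob in a single
+        # compression section of the requested type.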
+ GenFdsGlobalVariable.GenerateSection(DummyFile, SectFiles, InputAlign=SectAlign, IsMakefile=IsMakefile)
+
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [DummyFile], Section.Section.SectionType['COMPRESS'],
+ CompressionType=self.CompTypeDict[self.CompType], IsMakefile=IsMakefile)
+ OutputFileList = []
+ OutputFileList.append(OutputFile)
+ return OutputFileList, self.Alignment
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DataSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DataSection.py
new file mode 100755
index 00000000..4391d333
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DataSection.py
@@ -0,0 +1,117 @@
+## @file
+# process data section generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Section
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+import subprocess
+from .Ffs import SectionSuffix
+import Common.LongFilePathOs as os
+from CommonDataClass.FdfClass import DataSectionClassObject
+from Common.Misc import PeImageClass
+from Common.LongFilePathSupport import CopyLongFilePath
+from Common.DataType import *
+
+## generate data section
+#
+#
+class DataSection (DataSectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ DataSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+    # Generate data section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+    #   @param  Dict        dictionary containing macros and their values
+ # @retval tuple (Generated file name list, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, keyStringList, FfsFile = None, Dict = None, IsMakefile = False):
+ #
+ # Prepare the parameter of GenSection
+ #
+ if Dict is None:
+ Dict = {}
+ if FfsFile is not None:
+ self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
+ self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
+ else:
+ self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
+ self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)
+
+        """Check whether the section file exists"""
+
+ if not os.path.exists(self.SectFileName):
+ self.SectFileName = os.path.join (GenFdsGlobalVariable.WorkSpaceDir,
+ self.SectFileName)
+
+ """Copy Map file to Ffs output"""
+ Filename = GenFdsGlobalVariable.MacroExtend(self.SectFileName)
+ if Filename[(len(Filename)-4):] == '.efi':
+ MapFile = Filename.replace('.efi', '.map')
+ CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
+ if IsMakefile:
+ if GenFdsGlobalVariable.CopyList == []:
+ GenFdsGlobalVariable.CopyList = [(MapFile, CopyMapFile)]
+ else:
+ GenFdsGlobalVariable.CopyList.append((MapFile, CopyMapFile))
+ else:
+ if os.path.exists(MapFile):
+ if not os.path.exists(CopyMapFile) or (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
+ CopyLongFilePath(MapFile, CopyMapFile)
+
+ #Get PE Section alignment when align is set to AUTO
+ if self.Alignment == 'Auto' and self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
+ self.Alignment = "0"
+ NoStrip = True
+ if self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
+ if self.KeepReloc is not None:
+ NoStrip = self.KeepReloc
+
+ if not NoStrip:
+ FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
+ if not os.path.exists(FileBeforeStrip) or \
+ (os.path.getmtime(self.SectFileName) > os.path.getmtime(FileBeforeStrip)):
+ CopyLongFilePath(self.SectFileName, FileBeforeStrip)
+ StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ StrippedFile,
+ [GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
+ Strip=True,
+ IsMakefile = IsMakefile
+ )
+ self.SectFileName = StrippedFile
+
+ if self.SecType == BINARY_FILE_TYPE_TE:
+ TeFile = os.path.join( OutputPath, ModuleName + 'Te.raw')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ TeFile,
+ [GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
+ Type='te',
+ IsMakefile = IsMakefile
+ )
+ self.SectFileName = TeFile
+
+ OutputFile = os.path.join (OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get(self.SecType))
+ OutputFile = os.path.normpath(OutputFile)
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [self.SectFileName], Section.Section.SectionType.get(self.SecType), IsMakefile = IsMakefile)
+ FileList = [OutputFile]
+ return FileList, self.Alignment
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DepexSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DepexSection.py
new file mode 100755
index 00000000..b2bcf26a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/DepexSection.py
@@ -0,0 +1,111 @@
+## @file
+# process depex section generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Section
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+import Common.LongFilePathOs as os
+from CommonDataClass.FdfClass import DepexSectionClassObject
+from AutoGen.GenDepex import DependencyExpression
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import PathClass
+from Common.DataType import *
+
+## generate depex section
+#
+#
+class DepexSection (DepexSectionClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ DepexSectionClassObject.__init__(self)
+
+ def __FindGuidValue(self, CName):
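+        # Search every package used by any INF in the FDF, across all build
+        # ARCHs, for a PPI, Protocol, or GUID whose C name matches CName and
+        # return its GUID value; return None when it cannot be resolved.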
+ for Arch in GenFdsGlobalVariable.ArchList:
+ PkgList = GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,
+ Arch,
+ GenFdsGlobalVariable.TargetName,
+ GenFdsGlobalVariable.ToolChainTag)
+ for Inf in GenFdsGlobalVariable.FdfParser.Profile.InfList:
+ ModuleData = GenFdsGlobalVariable.WorkSpace.BuildObject[
+ PathClass(Inf, GenFdsGlobalVariable.WorkSpaceDir),
+ Arch,
+ GenFdsGlobalVariable.TargetName,
+ GenFdsGlobalVariable.ToolChainTag
+ ]
+ for Pkg in ModuleData.Packages:
+ if Pkg not in PkgList:
+ PkgList.append(Pkg)
+ for PkgDb in PkgList:
+ if CName in PkgDb.Ppis:
+ return PkgDb.Ppis[CName]
+ if CName in PkgDb.Protocols:
+ return PkgDb.Protocols[CName]
+ if CName in PkgDb.Guids:
+ return PkgDb.Guids[CName]
+ return None
+
+ ## GenSection() method
+ #
+    # Generate depex section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+    #   @param  Dict        dictionary containing macros and their values
+ # @retval tuple (Generated file name list, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, keyStringList, FfsFile = None, Dict = None, IsMakefile = False):
+        if not self.ExpressionProcessed:
+ self.Expression = self.Expression.replace("\n", " ").replace("\r", " ")
+ ExpList = self.Expression.split()
+
+ for Exp in ExpList:
+ if Exp.upper() not in ('AND', 'OR', 'NOT', 'TRUE', 'FALSE', 'SOR', 'BEFORE', 'AFTER', 'END'):
+ GuidStr = self.__FindGuidValue(Exp)
+ if GuidStr is None:
+ EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,
+ "Depex GUID %s could not be found in build DB! (ModuleName: %s)" % (Exp, ModuleName))
+
+ self.Expression = self.Expression.replace(Exp, GuidStr)
+
+ self.Expression = self.Expression.strip()
+ self.ExpressionProcessed = True
+
+ if self.DepexType == 'PEI_DEPEX_EXP':
+ ModuleType = SUP_MODULE_PEIM
+ SecType = BINARY_FILE_TYPE_PEI_DEPEX
+ elif self.DepexType == 'DXE_DEPEX_EXP':
+ ModuleType = SUP_MODULE_DXE_DRIVER
+ SecType = BINARY_FILE_TYPE_DXE_DEPEX
+ elif self.DepexType == 'SMM_DEPEX_EXP':
+ ModuleType = SUP_MODULE_DXE_SMM_DRIVER
+ SecType = BINARY_FILE_TYPE_SMM_DEPEX
+ else:
+ EdkLogger.error("GenFds", FORMAT_INVALID,
+ "Depex type %s is not valid for module %s" % (self.DepexType, ModuleName))
+
+ InputFile = os.path.join (OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + '.depex')
+ InputFile = os.path.normpath(InputFile)
+ Depex = DependencyExpression(self.Expression, ModuleType)
+ Depex.Generate(InputFile)
+
+ OutputFile = os.path.join (OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + '.dpx')
+ OutputFile = os.path.normpath(OutputFile)
+
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [InputFile], Section.Section.SectionType.get (SecType), IsMakefile=IsMakefile)
+ return [OutputFile], self.Alignment
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/EfiSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/EfiSection.py
new file mode 100755
index 00000000..0e0c0dff
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/EfiSection.py
@@ -0,0 +1,318 @@
+## @file
+# process rule section generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from struct import *
+from . import Section
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+import subprocess
+from .Ffs import SectionSuffix
+import Common.LongFilePathOs as os
+from CommonDataClass.FdfClass import EfiSectionClassObject
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import PeImageClass
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.LongFilePathSupport import CopyLongFilePath
+from Common.DataType import *
+
+## generate rule section
+#
+#
+class EfiSection (EfiSectionClassObject):
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ EfiSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # Generate rule section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+    #   @param  Dict        dictionary containing macros and their values
+ # @retval tuple (Generated file name list, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = None, IsMakefile = False) :
+
+ if self.FileName is not None and self.FileName.startswith('PCD('):
+ self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
+ """Prepare the parameter of GenSection"""
+ if FfsInf is not None :
+ InfFileName = FfsInf.InfFileName
+ SectionType = FfsInf.__ExtendMacro__(self.SectionType)
+ Filename = FfsInf.__ExtendMacro__(self.FileName)
+ BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
+ StringData = FfsInf.__ExtendMacro__(self.StringData)
+ ModuleNameStr = FfsInf.__ExtendMacro__('$(MODULE_NAME)')
+ NoStrip = True
+ if FfsInf.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_MM_CORE_STANDALONE) and SectionType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
+ if FfsInf.KeepReloc is not None:
+ NoStrip = FfsInf.KeepReloc
+ elif FfsInf.KeepRelocFromRule is not None:
+ NoStrip = FfsInf.KeepRelocFromRule
+ elif self.KeepReloc is not None:
+ NoStrip = self.KeepReloc
+ elif FfsInf.ShadowFromInfFile is not None:
+ NoStrip = FfsInf.ShadowFromInfFile
+ else:
+            EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s applies a rule but no FfsInf is given!" % ModuleName)
+
+ """If the file name was pointed out, add it in FileList"""
+ FileList = []
+ if Dict is None:
+ Dict = {}
+ if Filename is not None:
+ Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
+ # check if the path is absolute or relative
+ if os.path.isabs(Filename):
+ Filename = os.path.normpath(Filename)
+ else:
+ Filename = os.path.normpath(os.path.join(FfsInf.EfiOutputPath, Filename))
+
+ if not self.Optional:
+ FileList.append(Filename)
+ elif os.path.exists(Filename):
+ FileList.append(Filename)
+ elif IsMakefile:
+ SuffixMap = FfsInf.GetFinalTargetSuffixMap()
+ if '.depex' in SuffixMap:
+ FileList.append(Filename)
+ else:
+ FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FileType, self.FileExtension, Dict, IsMakefile=IsMakefile, SectionType=SectionType)
+ if IsSect :
+ return FileList, self.Alignment
+
+ Index = 0
+ Align = self.Alignment
+
+ """ If Section type is 'VERSION'"""
+ OutputFileList = []
+ if SectionType == 'VERSION':
+
+ InfOverrideVerString = False
+ if FfsInf.Version is not None:
+ #StringData = FfsInf.Version
+ BuildNum = FfsInf.Version
+ InfOverrideVerString = True
+
+ if InfOverrideVerString:
+ #VerTuple = ('-n', '"' + StringData + '"')
+ if BuildNum is not None and BuildNum != '':
+ BuildNumTuple = ('-j', BuildNum)
+ else:
+ BuildNumTuple = tuple()
+
+ Num = SecNum
+ OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
+ #Ui=StringData,
+ Ver=BuildNum,
+ IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ elif FileList != []:
+ for File in FileList:
+ Index = Index + 1
+ Num = '%s.%d' %(SecNum, Index)
+ OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
+ f = open(File, 'r')
+ VerString = f.read()
+ f.close()
+ BuildNum = VerString
+ if BuildNum is not None and BuildNum != '':
+ BuildNumTuple = ('-j', BuildNum)
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
+ #Ui=VerString,
+ Ver=BuildNum,
+ IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ else:
+ BuildNum = StringData
+ if BuildNum is not None and BuildNum != '':
+ BuildNumTuple = ('-j', BuildNum)
+ else:
+ BuildNumTuple = tuple()
+ BuildNumString = ' ' + ' '.join(BuildNumTuple)
+
+ #if VerString == '' and
+ if BuildNumString == '':
+                if self.Optional:
+                    GenFdsGlobalVariable.VerboseLogger("Optional section doesn't exist!")
+ return [], None
+ else:
+                    EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s is missing the Version section value" % InfFileName)
+ Num = SecNum
+ OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
+ #Ui=VerString,
+ Ver=BuildNum,
+ IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ #
+ # If Section Type is BINARY_FILE_TYPE_UI
+ #
+ elif SectionType == BINARY_FILE_TYPE_UI:
+
+ InfOverrideUiString = False
+ if FfsInf.Ui is not None:
+ StringData = FfsInf.Ui
+ InfOverrideUiString = True
+
+ if InfOverrideUiString:
+ Num = SecNum
+ if IsMakefile and StringData == ModuleNameStr:
+ StringData = "$(MODULE_NAME)"
+ OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
+ Ui=StringData, IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ elif FileList != []:
+ for File in FileList:
+ Index = Index + 1
+ Num = '%s.%d' %(SecNum, Index)
+ OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
+ f = open(File, 'r')
+ UiString = f.read()
+ f.close()
+ if IsMakefile and UiString == ModuleNameStr:
+ UiString = "$(MODULE_NAME)"
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
+ Ui=UiString, IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+ else:
+ if StringData is not None and len(StringData) > 0:
+ UiTuple = ('-n', '"' + StringData + '"')
+ else:
+ UiTuple = tuple()
+
+                if self.Optional:
+                    GenFdsGlobalVariable.VerboseLogger("Optional section doesn't exist!")
+                    # return an empty list for consistency with the sibling branches
+                    return [], None
+ else:
+                    EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s is missing the UI section value" % InfFileName)
+
+ Num = SecNum
+ if IsMakefile and StringData == ModuleNameStr:
+ StringData = "$(MODULE_NAME)"
+ OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
+ Ui=StringData, IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ #
+ # If Section Type is BINARY_FILE_TYPE_RAW
+ #
+ elif SectionType == BINARY_FILE_TYPE_RAW:
+ """If File List is empty"""
+ if FileList == []:
+                if self.Optional:
+                    GenFdsGlobalVariable.VerboseLogger("Optional section doesn't exist!")
+ return [], None
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
+
+ elif len(FileList) > 1:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Files suffixed with %s are not allowed to have more than one file in %s[Binaries] section" % (
+ self.FileExtension, InfFileName))
+ else:
+ for File in FileList:
+ File = GenFdsGlobalVariable.MacroExtend(File, Dict)
+ OutputFileList.append(File)
+
+ else:
+ """If File List is empty"""
+ if FileList == [] :
+                if self.Optional:
+                    GenFdsGlobalVariable.VerboseLogger("Optional section doesn't exist!")
+ return [], None
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
+
+ else:
+ """Convert the File to Section file one by one """
+ for File in FileList:
+ """ Copy Map file to FFS output path """
+ Index = Index + 1
+ Num = '%s.%d' %(SecNum, Index)
+ OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
+ File = GenFdsGlobalVariable.MacroExtend(File, Dict)
+
+ #Get PE Section alignment when align is set to AUTO
+ if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
+ Align = "0"
+ if File[(len(File)-4):] == '.efi' and FfsInf.InfModule.BaseName == os.path.basename(File)[:-4]:
+ MapFile = File.replace('.efi', '.map')
+ CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
+ if IsMakefile:
+ if GenFdsGlobalVariable.CopyList == []:
+ GenFdsGlobalVariable.CopyList = [(MapFile, CopyMapFile)]
+ else:
+ GenFdsGlobalVariable.CopyList.append((MapFile, CopyMapFile))
+ else:
+ if os.path.exists(MapFile):
+ if not os.path.exists(CopyMapFile) or \
+ (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
+ CopyLongFilePath(MapFile, CopyMapFile)
+
+ if not NoStrip:
+ FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
+ if IsMakefile:
+ if GenFdsGlobalVariable.CopyList == []:
+ GenFdsGlobalVariable.CopyList = [(File, FileBeforeStrip)]
+ else:
+ GenFdsGlobalVariable.CopyList.append((File, FileBeforeStrip))
+ else:
+ if not os.path.exists(FileBeforeStrip) or \
+ (os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
+ CopyLongFilePath(File, FileBeforeStrip)
+ StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ StrippedFile,
+ [File],
+ Strip=True,
+ IsMakefile = IsMakefile
+ )
+ File = StrippedFile
+
+ """For TE Section call GenFw to generate TE image"""
+
+ if SectionType == BINARY_FILE_TYPE_TE:
+ TeFile = os.path.join( OutputPath, ModuleName + 'Te.raw')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ TeFile,
+ [File],
+ Type='te',
+ IsMakefile = IsMakefile
+ )
+ File = TeFile
+
+ """Call GenSection"""
+ GenFdsGlobalVariable.GenerateSection(OutputFile,
+ [File],
+ Section.Section.SectionType.get (SectionType),
+ IsMakefile=IsMakefile
+ )
+ OutputFileList.append(OutputFile)
+
+ return OutputFileList, Align
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fd.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fd.py
new file mode 100755
index 00000000..097f6244
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fd.py
@@ -0,0 +1,155 @@
+## @file
+# process FD generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Region
+from . import Fv
+import Common.LongFilePathOs as os
+from io import BytesIO
+import sys
+from struct import *
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import FDClassObject
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange
+from Common.DataType import BINARY_FILE_TYPE_FV
+
+## generate FD
+#
+#
+class FD(FDClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FDClassObject.__init__(self)
+
+ ## GenFd() method
+ #
+ # Generate FD
+ #
+ # @retval string Generated FD file name
+ #
+ def GenFd (self, Flag = False):
+ if self.FdUiName.upper() + 'fd' in GenFdsGlobalVariable.ImageBinDict:
+ return GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd']
+
+ #
+ # Print Information
+ #
+ FdFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.FdUiName + '.fd')
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger("\nFd File Name:%s (%s)" %(self.FdUiName, FdFileName))
+
+ Offset = 0x00
+ for item in self.BlockSizeList:
+ Offset = Offset + item[0] * item[1]
+ if Offset != self.Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'FD %s Size not consistent with block array' % self.FdUiName)
+        GenFdsGlobalVariable.VerboseLogger('The following FVs will be added to the FD:')
+ for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
+ GenFdsGlobalVariable.VerboseLogger(FvObj)
+
+ HasCapsuleRegion = False
+ for RegionObj in self.RegionList:
+ if RegionObj.RegionType == 'CAPSULE':
+ HasCapsuleRegion = True
+ break
+ if HasCapsuleRegion:
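+            # Pre-pass: lay the non-CAPSULE regions into a scratch buffer first,
+            # ignoring layout errors, so the images they generate are available
+            # (via ImageBinDict) when the CAPSULE region is processed below.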
+ TempFdBuffer = BytesIO()
+ PreviousRegionStart = -1
+ PreviousRegionSize = 1
+
+ for RegionObj in self.RegionList :
+ if RegionObj.RegionType == 'CAPSULE':
+ continue
+ if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
+ pass
+ elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
+ pass
+ elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
+ PadRegion = Region.Region()
+ PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
+ PadRegion.Size = RegionObj.Offset - PadRegion.Offset
+ if not Flag:
+ PadRegion.AddToBuffer(TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
+ PreviousRegionStart = RegionObj.Offset
+ PreviousRegionSize = RegionObj.Size
+ #
+ # Call each region's AddToBuffer function
+ #
+ if PreviousRegionSize > self.Size:
+ pass
+ GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
+ RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
+
+ FdBuffer = BytesIO()
+ PreviousRegionStart = -1
+ PreviousRegionSize = 1
+ for RegionObj in self.RegionList :
+ if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
+                EdkLogger.error("GenFds", GENFDS_ERROR,
+                                'Region offset 0x%X is out of order with the region starting at 0x%X, size 0x%X\nRegions in an FDF must appear in ascending offset order.'\
+                                % (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
+ elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ 'Region offset 0x%X overlaps with Region starting from 0x%X, size 0x%X' \
+ % (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
+ elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
+ PadRegion = Region.Region()
+ PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
+ PadRegion.Size = RegionObj.Offset - PadRegion.Offset
+ if not Flag:
+ PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
+ PreviousRegionStart = RegionObj.Offset
+ PreviousRegionSize = RegionObj.Size
+ #
+ # Verify current region fits within allocated FD section Size
+ #
+ if PreviousRegionStart + PreviousRegionSize > self.Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ 'FD %s size too small to fit region with offset 0x%X and size 0x%X'
+ % (self.FdUiName, PreviousRegionStart, PreviousRegionSize))
+ #
+ # Call each region's AddToBuffer function
+ #
+ GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
+ RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict, Flag=Flag)
+ #
+ # Write the buffer contents to Fd file
+ #
+ GenFdsGlobalVariable.VerboseLogger('Write the buffer contents to Fd file')
+ if not Flag:
+ SaveFileOnChange(FdFileName, FdBuffer.getvalue())
+ FdBuffer.close()
+ GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd'] = FdFileName
+ return FdFileName
+
+ ## generate flash map file
+ #
+ # @param self The object pointer
+ #
+ def GenFlashMap (self):
+ pass
+
+
+
+
+
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FdfParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FdfParser.py
new file mode 100755
index 00000000..73ae5b32
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -0,0 +1,4526 @@
+## @file
+# parse FDF file
+#
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2015, Hewlett Packard Enterprise Development, L.P.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from re import compile, DOTALL
+from string import hexdigits
+from uuid import UUID
+
+from Common.BuildToolError import *
+from Common import EdkLogger
+from Common.Misc import PathClass, tdict, ProcessDuplicatedInf, GuidStructureStringToGuidString
+from Common.StringUtils import NormPath, ReplaceMacro
+from Common import GlobalData
+from Common.Expression import *
+from Common.DataType import *
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+import Common.LongFilePathOs as os
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.RangeExpression import RangeExpression
+from collections import OrderedDict
+
+from .Fd import FD
+from .Region import Region
+from .Fv import FV
+from .AprioriSection import AprioriSection
+from .FfsInfStatement import FfsInfStatement
+from .FfsFileStatement import FileStatement
+from .VerSection import VerSection
+from .UiSection import UiSection
+from .FvImageSection import FvImageSection
+from .DataSection import DataSection
+from .DepexSection import DepexSection
+from .CompressSection import CompressSection
+from .GuidSection import GuidSection
+from .Capsule import EFI_CERT_TYPE_PKCS7_GUID, EFI_CERT_TYPE_RSA2048_SHA256_GUID, Capsule
+from .CapsuleData import CapsuleFfs, CapsulePayload, CapsuleFv, CapsuleFd, CapsuleAnyFile, CapsuleAfile
+from .RuleComplexFile import RuleComplexFile
+from .RuleSimpleFile import RuleSimpleFile
+from .EfiSection import EfiSection
+from .OptionRom import OPTIONROM
+from .OptRomInfStatement import OptRomInfStatement, OverrideAttribs
+from .OptRomFileStatement import OptRomFileStatement
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+
+T_CHAR_CR = '\r'
+T_CHAR_TAB = '\t'
+T_CHAR_DOUBLE_QUOTE = '\"'
+T_CHAR_SINGLE_QUOTE = '\''
+T_CHAR_BRACE_R = '}'
+
+SEPARATORS = {TAB_EQUAL_SPLIT, TAB_VALUE_SPLIT, TAB_COMMA_SPLIT, '{', T_CHAR_BRACE_R}
+ALIGNMENTS = {"Auto", "8", "16", "32", "64", "128", "512", "1K", "4K", "32K", "64K", "128K",
+ "256K", "512K", "1M", "2M", "4M", "8M", "16M"}
+ALIGNMENT_NOAUTO = ALIGNMENTS - {"Auto"}
+CR_LB_SET = {T_CHAR_CR, TAB_LINE_BREAK}
+
+RegionSizePattern = compile(r"\s*(?P<base>(?:0x|0X)?[a-fA-F0-9]+)\s*\|\s*(?P<size>(?:0x|0X)?[a-fA-F0-9]+)\s*")
+RegionSizeGuidPattern = compile(r"\s*(?P<base>\w+\.\w+[\.\w\[\]]*)\s*\|\s*(?P<size>\w+\.\w+[\.\w\[\]]*)\s*")
+RegionOffsetPcdPattern = compile(r"\s*(?P<base>\w+\.\w+[\.\w\[\]]*)\s*$")
+ShortcutPcdPattern = compile(r"\s*\w+\s*=\s*(?P<value>(?:0x|0X)?[a-fA-F0-9]+)\s*\|\s*(?P<name>\w+\.\w+)\s*")
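+# e.g. RegionSizePattern matches a region line such as "0x100000 | 0x40000",
+# and RegionSizeGuidPattern one such as "gGuid.PcdOffset | gGuid.PcdSize"
+# (illustrative PCD names).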
+BaseAddrValuePattern = compile('^0[xX][0-9a-fA-F]+')
+FileExtensionPattern = compile(r'([a-zA-Z][a-zA-Z0-9]*)')
+TokenFindPattern = compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
+AllIncludeFileList = []
+
+# Get the closest parent
+def GetParentAtLine (Line):
+ for Profile in AllIncludeFileList:
+ if Profile.IsLineInFile(Line):
+ return Profile
+ return None
+
+# Check include loop
+def IsValidInclude (File, Line):
+ for Profile in AllIncludeFileList:
+ if Profile.IsLineInFile(Line) and Profile.FileName == File:
+ return False
+
+ return True
+
+def GetRealFileLine (File, Line):
+ InsertedLines = 0
+ for Profile in AllIncludeFileList:
+ if Profile.IsLineInFile(Line):
+ return Profile.GetLineInFile(Line)
+ elif Line >= Profile.InsertStartLineNumber and Profile.Level == 1:
+ InsertedLines += Profile.GetTotalLines()
+
+ return (File, Line - InsertedLines)
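+# Illustrative mapping: with a single include file whose contents were
+# inserted starting at expanded line 101, an error at expanded line 120 maps
+# to (IncludeFileName, 20); a line below the include maps back to the parent
+# FDF with the inserted line count subtracted.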
+
+## The exception class that is used to report error messages when parsing FDF
+#
+# Currently the "ToolName" is set to be "FdfParser".
+#
+class Warning (Exception):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param File The FDF file name
+ # @param Line The line number where the error occurs
+ #
+ def __init__(self, Str, File = None, Line = None):
+ FileLineTuple = GetRealFileLine(File, Line)
+ self.FileName = FileLineTuple[0]
+ self.LineNumber = FileLineTuple[1]
+ self.OriginalLineNumber = Line
+ self.Message = Str
+ self.ToolName = 'FdfParser'
+
+ def __str__(self):
+ return self.Message
+
+ # helper functions to facilitate consistency in warnings
+ # each function is for a different common warning
+ @staticmethod
+ def Expected(Str, File, Line):
+ return Warning("expected {}".format(Str), File, Line)
+ @staticmethod
+ def ExpectedEquals(File, Line):
+ return Warning.Expected("'='", File, Line)
+ @staticmethod
+ def ExpectedCurlyOpen(File, Line):
+ return Warning.Expected("'{'", File, Line)
+ @staticmethod
+ def ExpectedCurlyClose(File, Line):
+ return Warning.Expected("'}'", File, Line)
+ @staticmethod
+ def ExpectedBracketClose(File, Line):
+ return Warning.Expected("']'", File, Line)
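+ # Typical usage inside the parser (illustrative):
+ # raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ # raise Warning.Expected("FV name", self.FileName, self.CurrentLineNumber)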
+
+## The include file content class that is used to record file data when parsing an include file
+#
+# May raise Exception when opening file.
+#
+class IncludeFileProfile:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.FileName = FileName
+ self.FileLinesList = []
+ try:
+ with open(FileName, "r") as fsock:
+ self.FileLinesList = fsock.readlines()
+ for index, line in enumerate(self.FileLinesList):
+ if not line.endswith(TAB_LINE_BREAK):
+ self.FileLinesList[index] += TAB_LINE_BREAK
+ except:
+ EdkLogger.error("FdfParser", FILE_OPEN_FAILURE, ExtraData=FileName)
+
+ self.InsertStartLineNumber = None
+ self.InsertAdjust = 0
+ self.IncludeFileList = []
+ self.Level = 1 # first level include file
+
+ def GetTotalLines(self):
+ TotalLines = self.InsertAdjust + len(self.FileLinesList)
+
+ for Profile in self.IncludeFileList:
+ TotalLines += Profile.GetTotalLines()
+
+ return TotalLines
+
+ def IsLineInFile(self, Line):
+ if Line >= self.InsertStartLineNumber and Line < self.InsertStartLineNumber + self.GetTotalLines():
+ return True
+
+ return False
+
+ def GetLineInFile(self, Line):
+ if not self.IsLineInFile (Line):
+ return (self.FileName, -1)
+
+ InsertedLines = self.InsertStartLineNumber
+
+ for Profile in self.IncludeFileList:
+ if Profile.IsLineInFile(Line):
+ return Profile.GetLineInFile(Line)
+ elif Line >= Profile.InsertStartLineNumber:
+ InsertedLines += Profile.GetTotalLines()
+
+ return (self.FileName, Line - InsertedLines + 1)
+
+## The FDF content class that is used to record file data when parsing FDF
+#
+# May raise Exception when opening file.
+#
+class FileProfile:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.FileLinesList = []
+ try:
+ with open(FileName, "r") as fsock:
+ self.FileLinesList = fsock.readlines()
+
+ except:
+ EdkLogger.error("FdfParser", FILE_OPEN_FAILURE, ExtraData=FileName)
+
+ self.FileName = FileName
+ self.PcdDict = OrderedDict()
+ self.PcdLocalDict = OrderedDict()
+ self.InfList = []
+ self.InfDict = {'ArchTBD':[]}
+ # ECC will use this Dict and List information
+ self.PcdFileLineDict = {}
+ self.InfFileLineList = []
+
+ self.FdDict = {}
+ self.FdNameNotSet = False
+ self.FvDict = {}
+ self.CapsuleDict = {}
+ self.RuleDict = {}
+ self.OptRomDict = {}
+ self.FmpPayloadDict = {}
+
+## The syntax parser for FDF
+#
+# The PreprocessFile method should be called prior to ParseFile.
+# The CycleReferenceCheck method can detect cycles in FDF contents.
+#
+# GetNext*** procedures get the next token first, then make a judgement on it.
+# Get*** procedures make a judgement on the current token only.
+#
+class FdfParser:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file to be parsed
+ #
+ def __init__(self, FileName):
+ self.Profile = FileProfile(FileName)
+ self.FileName = FileName
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+ self.CurrentFdName = None
+ self.CurrentFvName = None
+ self._Token = ""
+ self._SkippedChars = ""
+ GlobalData.gFdfParser = self
+
+ # Used to store section info
+ self._CurSection = []
+ # Key: [section name, UI name, arch]
+ # Value: {MACRO_NAME: MACRO_VALUE}
+ self._MacroDict = tdict(True, 3)
+ self._PcdDict = OrderedDict()
+
+ self._WipeOffArea = []
+ if GenFdsGlobalVariable.WorkSpaceDir == '':
+ GenFdsGlobalVariable.WorkSpaceDir = os.getenv("WORKSPACE")
+
+ ## _SkipWhiteSpace() method
+ #
+ # Skip white spaces from current char.
+ #
+ # @param self The object pointer
+ #
+ def _SkipWhiteSpace(self):
+ while not self._EndOfFile():
+ if self._CurrentChar() in {TAB_PRINTCHAR_NUL, T_CHAR_CR, TAB_LINE_BREAK, TAB_SPACE_SPLIT, T_CHAR_TAB}:
+ self._SkippedChars += str(self._CurrentChar())
+ self._GetOneChar()
+ else:
+ return
+ return
+
+ ## _EndOfFile() method
+ #
+ # Judge whether the current buffer position is at the end of the file
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at file end
+ # @retval False Current File buffer position is NOT at file end
+ #
+ def _EndOfFile(self):
+ NumberOfLines = len(self.Profile.FileLinesList)
+ SizeOfLastLine = len(self.Profile.FileLinesList[-1])
+ if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
+ return True
+ if self.CurrentLineNumber > NumberOfLines:
+ return True
+ return False
+
+ ## _EndOfLine() method
+ #
+ # Judge whether the current buffer position is at the end of the line
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at line end
+ # @retval False Current File buffer position is NOT at line end
+ #
+ def _EndOfLine(self):
+ if self.CurrentLineNumber > len(self.Profile.FileLinesList):
+ return True
+ SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if self.CurrentOffsetWithinLine >= SizeOfCurrentLine:
+ return True
+ return False
+
+ ## Rewind() method
+ #
+ # Reset file data buffer to the initial state
+ #
+ # @param self The object pointer
+ # @param DestLine Optional new destination line number.
+ # @param DestOffset Optional new destination offset.
+ #
+ def Rewind(self, DestLine = 1, DestOffset = 0):
+ self.CurrentLineNumber = DestLine
+ self.CurrentOffsetWithinLine = DestOffset
+
+ ## _UndoOneChar() method
+ #
+ # Go back one char in the file buffer
+ #
+ # @param self The object pointer
+ # @retval True Successfully go back one char
+ # @retval False Not able to go back one char as file beginning reached
+ #
+ def _UndoOneChar(self):
+ if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
+ return False
+ elif self.CurrentOffsetWithinLine == 0:
+ self.CurrentLineNumber -= 1
+ self.CurrentOffsetWithinLine = len(self._CurrentLine()) - 1
+ else:
+ self.CurrentOffsetWithinLine -= 1
+ return True
+
+ ## _GetOneChar() method
+ #
+ # Move forward one char in the file buffer
+ #
+ # @param self The object pointer
+ #
+ def _GetOneChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ else:
+ self.CurrentOffsetWithinLine += 1
+
+ ## _CurrentChar() method
+ #
+ # Get the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Current char
+ #
+ def _CurrentChar(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
+
+ ## _NextChar() method
+ #
+ # Get the char one past the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Next char
+ #
+ def _NextChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ return self.Profile.FileLinesList[self.CurrentLineNumber][0]
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
+
+ ## _SetCurrentCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def _SetCurrentCharValue(self, Value):
+ self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
+
+ ## _CurrentLine() method
+ #
+ # Get the list that contains current line contents
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
+ def _CurrentLine(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+
+ def _StringToList(self):
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesList]
+ if not self.Profile.FileLinesList:
+ EdkLogger.error('FdfParser', FILE_READ_FAILURE, 'The file is empty!', File=self.FileName)
+ self.Profile.FileLinesList[-1].append(' ')
+
+ def _ReplaceFragment(self, StartPos, EndPos, Value = ' '):
+ if StartPos[0] == EndPos[0]:
+ Offset = StartPos[1]
+ while Offset <= EndPos[1]:
+ self.Profile.FileLinesList[StartPos[0]][Offset] = Value
+ Offset += 1
+ return
+
+ Offset = StartPos[1]
+ while self.Profile.FileLinesList[StartPos[0]][Offset] not in CR_LB_SET:
+ self.Profile.FileLinesList[StartPos[0]][Offset] = Value
+ Offset += 1
+
+ Line = StartPos[0]
+ while Line < EndPos[0]:
+ Offset = 0
+ while self.Profile.FileLinesList[Line][Offset] not in CR_LB_SET:
+ self.Profile.FileLinesList[Line][Offset] = Value
+ Offset += 1
+ Line += 1
+
+ Offset = 0
+ while Offset <= EndPos[1]:
+ self.Profile.FileLinesList[EndPos[0]][Offset] = Value
+ Offset += 1
+
+ def _SetMacroValue(self, Macro, Value):
+ if not self._CurSection:
+ return
+
+ MacroDict = {}
+ if not self._MacroDict[self._CurSection[0], self._CurSection[1], self._CurSection[2]]:
+ self._MacroDict[self._CurSection[0], self._CurSection[1], self._CurSection[2]] = MacroDict
+ else:
+ MacroDict = self._MacroDict[self._CurSection[0], self._CurSection[1], self._CurSection[2]]
+ MacroDict[Macro] = Value
+
+ def _GetMacroValue(self, Macro):
+ # Highest priority
+ if Macro in GlobalData.gCommandLineDefines:
+ return GlobalData.gCommandLineDefines[Macro]
+ if Macro in GlobalData.gGlobalDefines:
+ return GlobalData.gGlobalDefines[Macro]
+
+ if self._CurSection:
+ MacroDict = self._MacroDict[
+ self._CurSection[0],
+ self._CurSection[1],
+ self._CurSection[2]
+ ]
+ if MacroDict and Macro in MacroDict:
+ return MacroDict[Macro]
+
+ # Lowest priority
+ if Macro in GlobalData.gPlatformDefines:
+ return GlobalData.gPlatformDefines[Macro]
+ return None
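+ # Illustrative precedence: a macro set on the build command line
+ # (e.g. -D TARGET=RELEASE) wins over a DEFINE in the current FDF section,
+ # which in turn wins over a platform-level define.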
+
+ def _SectionHeaderParser(self, Section):
+ # [Defines]
+ # [FD.UiName]: a dummy name is used if the UI name is omitted
+ # [FV.UiName]
+ # [Capsule.UiName]
+ # [Rule]: don't take the rule section into account; macros are not allowed in this section
+ # [OptionRom.DriverName]
+ self._CurSection = []
+ Section = Section.strip()[1:-1].upper().replace(' ', '').strip(TAB_SPLIT)
+ ItemList = Section.split(TAB_SPLIT)
+ Item = ItemList[0]
+ if Item == '' or Item == 'RULE':
+ return
+
+ if Item == TAB_COMMON_DEFINES.upper():
+ self._CurSection = [TAB_COMMON, TAB_COMMON, TAB_COMMON]
+ elif len(ItemList) > 1:
+ self._CurSection = [ItemList[0], ItemList[1], TAB_COMMON]
+ elif len(ItemList) > 0:
+ self._CurSection = [ItemList[0], 'DUMMY', TAB_COMMON]
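+ # Illustrative results (section header -> self._CurSection):
+ # "[Defines]" -> [TAB_COMMON, TAB_COMMON, TAB_COMMON]
+ # "[FV.FvMain]" -> ['FV', 'FVMAIN', TAB_COMMON]
+ # "[FD.]" -> ['FD', 'DUMMY', TAB_COMMON]; "[Rule.X]" -> [] (no macro scope)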
+
+ ## PreprocessFile() method
+ #
+ # Preprocess file contents, replace comments with spaces.
+ # In the end, rewind the file buffer pointer to the beginning
+ # BUGBUG: No !include statement processing contained in this procedure
+ # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
+ #
+ # @param self The object pointer
+ #
+ def PreprocessFile(self):
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ # HashComment in quoted string " " is ignored.
+ InString = False
+
+ while not self._EndOfFile():
+
+ if self._CurrentChar() == T_CHAR_DOUBLE_QUOTE and not InComment:
+ InString = not InString
+ # on meeting a new line, // and '#' comments end
+ if self._CurrentChar() == TAB_LINE_BREAK:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ if InComment and HashComment:
+ InComment = False
+ HashComment = False
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self._CurrentChar() == TAB_STAR and self._NextChar() == TAB_BACK_SLASH:
+ self._SetCurrentCharValue(TAB_SPACE_SPLIT)
+ self._GetOneChar()
+ self._SetCurrentCharValue(TAB_SPACE_SPLIT)
+ self._GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ self._SetCurrentCharValue(TAB_SPACE_SPLIT)
+ self._GetOneChar()
+ # check for // comment
+ elif self._CurrentChar() == TAB_BACK_SLASH and self._NextChar() == TAB_BACK_SLASH and not self._EndOfLine():
+ InComment = True
+ DoubleSlashComment = True
+ # check for '#' comment
+ elif self._CurrentChar() == TAB_COMMENT_SPLIT and not self._EndOfLine() and not InString:
+ InComment = True
+ HashComment = True
+ # check for /* comment start
+ elif self._CurrentChar() == TAB_BACK_SLASH and self._NextChar() == TAB_STAR:
+ self._SetCurrentCharValue(TAB_SPACE_SPLIT)
+ self._GetOneChar()
+ self._SetCurrentCharValue(TAB_SPACE_SPLIT)
+ self._GetOneChar()
+ InComment = True
+ else:
+ self._GetOneChar()
+
+ # restore from ListOfList to ListOfString
+ self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
+ self.Rewind()
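+ # Illustrative effect: a line such as
+ # BaseAddress = 0xFF800000 # flash base
+ # leaves PreprocessFile as "BaseAddress = 0xFF800000" followed by spaces
+ # (the comment is blanked in place), so column offsets are preserved.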
+
+ ## PreprocessIncludeFile() method
+ #
+ # Preprocess file contents, replace !include statements with file contents.
+ # In the end, rewind the file buffer pointer to the beginning
+ #
+ # @param self The object pointer
+ #
+ def PreprocessIncludeFile(self):
+ # nested include support
+ Processed = False
+ MacroDict = {}
+ while self._GetNextToken():
+
+ if self._Token == TAB_DEFINE:
+ if not self._GetNextToken():
+ raise Warning.Expected("Macro name", self.FileName, self.CurrentLineNumber)
+ Macro = self._Token
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ Value = self._GetExpression()
+ MacroDict[Macro] = Value
+
+ elif self._Token == TAB_INCLUDE:
+ Processed = True
+ IncludeLine = self.CurrentLineNumber
+ IncludeOffset = self.CurrentOffsetWithinLine - len(TAB_INCLUDE)
+ if not self._GetNextToken():
+ raise Warning.Expected("include file name", self.FileName, self.CurrentLineNumber)
+ IncFileName = self._Token
+ PreIndex = 0
+ StartPos = IncFileName.find('$(', PreIndex)
+ EndPos = IncFileName.find(')', StartPos+2)
+ while StartPos != -1 and EndPos != -1:
+ Macro = IncFileName[StartPos+2: EndPos]
+ MacroVal = self._GetMacroValue(Macro)
+ if not MacroVal:
+ if Macro in MacroDict:
+ MacroVal = MacroDict[Macro]
+ if MacroVal is not None:
+ IncFileName = IncFileName.replace('$(' + Macro + ')', MacroVal, 1)
+ if MacroVal.find('$(') != -1:
+ PreIndex = StartPos
+ else:
+ PreIndex = StartPos + len(MacroVal)
+ else:
+ raise Warning("The Macro %s is not defined" %Macro, self.FileName, self.CurrentLineNumber)
+ StartPos = IncFileName.find('$(', PreIndex)
+ EndPos = IncFileName.find(')', StartPos+2)
+
+ IncludedFile = NormPath(IncFileName)
+ #
+ # First search the include file under the same directory as FDF file
+ #
+ IncludedFile1 = PathClass(IncludedFile, os.path.dirname(self.FileName))
+ ErrorCode = IncludedFile1.Validate()[0]
+ if ErrorCode != 0:
+ #
+ # Then search the include file under the same directory as DSC file
+ #
+ PlatformDir = ''
+ if GenFdsGlobalVariable.ActivePlatform:
+ PlatformDir = GenFdsGlobalVariable.ActivePlatform.Dir
+ elif GlobalData.gActivePlatform:
+ PlatformDir = GlobalData.gActivePlatform.MetaFile.Dir
+ IncludedFile1 = PathClass(IncludedFile, PlatformDir)
+ ErrorCode = IncludedFile1.Validate()[0]
+ if ErrorCode != 0:
+ #
+ # Also search file under the WORKSPACE directory
+ #
+ IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
+ ErrorCode = IncludedFile1.Validate()[0]
+ if ErrorCode != 0:
+ raise Warning("The include file does not exist under the following directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), PlatformDir, GlobalData.gWorkspace),
+ self.FileName, self.CurrentLineNumber)
+
+ if not IsValidInclude (IncludedFile1.Path, self.CurrentLineNumber):
+ raise Warning("The include file {0} is causing an include loop.\n".format(IncludedFile1.Path), self.FileName, self.CurrentLineNumber)
+
+ IncFileProfile = IncludeFileProfile(IncludedFile1.Path)
+
+ CurrentLine = self.CurrentLineNumber
+ CurrentOffset = self.CurrentOffsetWithinLine
+ # list index of the insertion, note that line number is 'CurrentLine + 1'
+ InsertAtLine = CurrentLine
+ ParentProfile = GetParentAtLine (CurrentLine)
+ if ParentProfile is not None:
+ ParentProfile.IncludeFileList.insert(0, IncFileProfile)
+ IncFileProfile.Level = ParentProfile.Level + 1
+ IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
+ # deal with remaining portions after "!include filename", if any.
+ if self._GetNextToken():
+ if self.CurrentLineNumber == CurrentLine:
+ RemainingLine = self._CurrentLine()[CurrentOffset:]
+ self.Profile.FileLinesList.insert(self.CurrentLineNumber, RemainingLine)
+ IncFileProfile.InsertAdjust += 1
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+
+ for Line in IncFileProfile.FileLinesList:
+ self.Profile.FileLinesList.insert(InsertAtLine, Line)
+ self.CurrentLineNumber += 1
+ InsertAtLine += 1
+
+ # inserted at the front (reverse order) to better locate the error file
+ AllIncludeFileList.insert(0, IncFileProfile)
+
+ # comment out the processed include file statement
+ TempList = list(self.Profile.FileLinesList[IncludeLine - 1])
+ TempList.insert(IncludeOffset, TAB_COMMENT_SPLIT)
+ self.Profile.FileLinesList[IncludeLine - 1] = ''.join(TempList)
+ if Processed: # Nested and back-to-back support
+ self.Rewind(DestLine = IncFileProfile.InsertStartLineNumber - 1)
+ Processed = False
+ # Preprocess done.
+ self.Rewind()
+
+ @staticmethod
+ def _GetIfListCurrentItemStat(IfList):
+ if len(IfList) == 0:
+ return True
+
+ for Item in IfList:
+ if Item[1] == False:
+ return False
+
+ return True
+
+ ## PreprocessConditionalStatement() method
+ #
+ # Preprocess conditional statement.
+ # In the end, rewind the file buffer pointer to the beginning
+ #
+ # @param self The object pointer
+ #
+ def PreprocessConditionalStatement(self):
+ # IfList is a stack of if branches with elements of list [Pos, CondSatisfied, BranchDetermined]
+ IfList = []
+ RegionLayoutLine = 0
+ ReplacedLine = -1
+ while self._GetNextToken():
+ # Determine section name and the location dependent macro
+ if self._GetIfListCurrentItemStat(IfList):
+ if self._Token.startswith(TAB_SECTION_START):
+ Header = self._Token
+ if not self._Token.endswith(TAB_SECTION_END):
+ self._SkipToToken(TAB_SECTION_END)
+ Header += self._SkippedChars
+ if Header.find('$(') != -1:
+ raise Warning("macro cannot be used in section header", self.FileName, self.CurrentLineNumber)
+ self._SectionHeaderParser(Header)
+ continue
+ # Replace macros except in RULE section or out of section
+ elif self._CurSection and ReplacedLine != self.CurrentLineNumber:
+ ReplacedLine = self.CurrentLineNumber
+ self._UndoToken()
+ CurLine = self.Profile.FileLinesList[ReplacedLine - 1]
+ PreIndex = 0
+ StartPos = CurLine.find('$(', PreIndex)
+ EndPos = CurLine.find(')', StartPos+2)
+ while StartPos != -1 and EndPos != -1 and self._Token not in {TAB_IF_DEF, TAB_IF_N_DEF, TAB_IF, TAB_ELSE_IF}:
+ MacroName = CurLine[StartPos+2: EndPos]
+ MacroValue = self._GetMacroValue(MacroName)
+ if MacroValue is not None:
+ CurLine = CurLine.replace('$(' + MacroName + ')', MacroValue, 1)
+ if MacroValue.find('$(') != -1:
+ PreIndex = StartPos
+ else:
+ PreIndex = StartPos + len(MacroValue)
+ else:
+ PreIndex = EndPos + 1
+ StartPos = CurLine.find('$(', PreIndex)
+ EndPos = CurLine.find(')', StartPos+2)
+ self.Profile.FileLinesList[ReplacedLine - 1] = CurLine
+ continue
+
+ if self._Token == TAB_DEFINE:
+ if self._GetIfListCurrentItemStat(IfList):
+ if not self._CurSection:
+ raise Warning("macro cannot be defined in Rule section or out of section", self.FileName, self.CurrentLineNumber)
+ DefineLine = self.CurrentLineNumber - 1
+ DefineOffset = self.CurrentOffsetWithinLine - len(TAB_DEFINE)
+ if not self._GetNextToken():
+ raise Warning.Expected("Macro name", self.FileName, self.CurrentLineNumber)
+ Macro = self._Token
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ Value = self._GetExpression()
+ self._SetMacroValue(Macro, Value)
+ self._WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ elif self._Token == 'SET':
+ if not self._GetIfListCurrentItemStat(IfList):
+ continue
+ SetLine = self.CurrentLineNumber - 1
+ SetOffset = self.CurrentOffsetWithinLine - len('SET')
+ PcdPair = self._GetNextPcdSettings()
+ PcdName = "%s.%s" % (PcdPair[1], PcdPair[0])
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ Value = self._GetExpression()
+ Value = self._EvaluateConditional(Value, self.CurrentLineNumber, 'eval', True)
+
+ self._PcdDict[PcdName] = Value
+
+ self.Profile.PcdDict[PcdPair] = Value
+ self.SetPcdLocalation(PcdPair)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple
+
+ self._WipeOffArea.append(((SetLine, SetOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ elif self._Token in {TAB_IF_DEF, TAB_IF_N_DEF, TAB_IF}:
+ IfStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self._Token))
+ IfList.append([IfStartPos, None, None])
+
+ CondLabel = self._Token
+ Expression = self._GetExpression()
+
+ if CondLabel == TAB_IF:
+ ConditionSatisfied = self._EvaluateConditional(Expression, IfList[-1][0][0] + 1, 'eval')
+ else:
+ ConditionSatisfied = self._EvaluateConditional(Expression, IfList[-1][0][0] + 1, 'in')
+ if CondLabel == TAB_IF_N_DEF:
+ ConditionSatisfied = not ConditionSatisfied
+
+ BranchDetermined = ConditionSatisfied
+ IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
+ if ConditionSatisfied:
+ self._WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ elif self._Token in {TAB_ELSE_IF, TAB_ELSE}:
+ ElseStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self._Token))
+ if len(IfList) <= 0:
+ raise Warning("Missing !if statement", self.FileName, self.CurrentLineNumber)
+
+ if IfList[-1][1]:
+ IfList[-1] = [ElseStartPos, False, True]
+ self._WipeOffArea.append((ElseStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ else:
+ self._WipeOffArea.append((IfList[-1][0], ElseStartPos))
+ IfList[-1] = [ElseStartPos, True, IfList[-1][2]]
+ if self._Token == TAB_ELSE_IF:
+ Expression = self._GetExpression()
+ ConditionSatisfied = self._EvaluateConditional(Expression, IfList[-1][0][0] + 1, 'eval')
+ IfList[-1] = [IfList[-1][0], ConditionSatisfied, IfList[-1][2]]
+
+ if IfList[-1][1]:
+ if IfList[-1][2]:
+ IfList[-1][1] = False
+ else:
+ IfList[-1][2] = True
+ self._WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ elif self._Token == '!endif':
+ if len(IfList) <= 0:
+ raise Warning("Missing !if statement", self.FileName, self.CurrentLineNumber)
+ if IfList[-1][1]:
+ self._WipeOffArea.append(((self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len('!endif')), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+ else:
+ self._WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
+
+ IfList.pop()
+ elif not IfList: # Don't use PCDs inside conditional directive
+ if self.CurrentLineNumber <= RegionLayoutLine:
+ # Don't try the same line twice
+ continue
+ SetPcd = ShortcutPcdPattern.match(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if SetPcd:
+ self._PcdDict[SetPcd.group('name')] = SetPcd.group('value')
+ RegionLayoutLine = self.CurrentLineNumber
+ continue
+ RegionSize = RegionSizePattern.match(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if not RegionSize:
+ RegionLayoutLine = self.CurrentLineNumber
+ continue
+ RegionSizeGuid = RegionSizeGuidPattern.match(self.Profile.FileLinesList[self.CurrentLineNumber])
+ if not RegionSizeGuid:
+ RegionLayoutLine = self.CurrentLineNumber + 1
+ continue
+ self._PcdDict[RegionSizeGuid.group('base')] = RegionSize.group('base')
+ self._PcdDict[RegionSizeGuid.group('size')] = RegionSize.group('size')
+ RegionLayoutLine = self.CurrentLineNumber + 1
+
+ if IfList:
+ raise Warning("Missing !endif", self.FileName, self.CurrentLineNumber)
+ self.Rewind()
+
+ def _CollectMacroPcd(self):
+ MacroDict = {}
+
+ # PCD macro
+ MacroDict.update(GlobalData.gPlatformPcds)
+ MacroDict.update(self._PcdDict)
+
+ # Lowest priority
+ MacroDict.update(GlobalData.gPlatformDefines)
+
+ if self._CurSection:
+ # Defines macro
+ ScopeMacro = self._MacroDict[TAB_COMMON, TAB_COMMON, TAB_COMMON]
+ if ScopeMacro:
+ MacroDict.update(ScopeMacro)
+
+ # Section macro
+ ScopeMacro = self._MacroDict[
+ self._CurSection[0],
+ self._CurSection[1],
+ self._CurSection[2]
+ ]
+ if ScopeMacro:
+ MacroDict.update(ScopeMacro)
+
+ MacroDict.update(GlobalData.gGlobalDefines)
+ MacroDict.update(GlobalData.gCommandLineDefines)
+ for Item in GlobalData.BuildOptionPcd:
+ if isinstance(Item, tuple):
+ continue
+ PcdName, TmpValue = Item.split(TAB_EQUAL_SPLIT)
+ TmpValue = BuildOptionValue(TmpValue, {})
+ MacroDict[PcdName.strip()] = TmpValue
+ # Highest priority
+
+ return MacroDict
+
+ def _EvaluateConditional(self, Expression, Line, Op = None, Value = None):
+ MacroPcdDict = self._CollectMacroPcd()
+ if Op == 'eval':
+ try:
+ if Value:
+ return ValueExpression(Expression, MacroPcdDict)(True)
+ else:
+ return ValueExpression(Expression, MacroPcdDict)()
+ except WrnExpression as Excpt:
+ #
+ # Catch expression evaluation warning here. We need to report
+ # the precise number of line and return the evaluation result
+ #
+ EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
+ File=self.FileName, ExtraData=self._CurrentLine(),
+ Line=Line)
+ return Excpt.result
+ except Exception as Excpt:
+ if hasattr(Excpt, 'Pcd'):
+ if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
+ Info = GlobalData.gPlatformOtherPcds[Excpt.Pcd]
+ raise Warning("Cannot use this PCD (%s) in an expression as"
+ " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
+ " of the DSC file (%s), and it is currently defined in this section:"
+ " %s, line #: %d." % (Excpt.Pcd, GlobalData.gPlatformOtherPcds['DSCFILE'], Info[0], Info[1]),
+ self.FileName, Line)
+ else:
+ raise Warning("PCD (%s) is not defined in DSC file (%s)" % (Excpt.Pcd, GlobalData.gPlatformOtherPcds['DSCFILE']),
+ self.FileName, Line)
+ else:
+ raise Warning(str(Excpt), self.FileName, Line)
+ else:
+ if Expression.startswith('$(') and Expression[-1] == ')':
+ Expression = Expression[2:-1]
+ return Expression in MacroPcdDict
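+ # Illustrative: for '!if $(TARGET) == "DEBUG"' Op is 'eval' and the
+ # expression is evaluated; for '!ifdef $(SOME_MACRO)' Op is 'in' and only
+ # membership of SOME_MACRO in the collected macro/PCD dict is tested.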
+
+ ## _IsToken() method
+ #
+ # Check whether the input string is found from the current char position onward
+ # If found, the string value is put into self._Token
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def _IsToken(self, String, IgnoreCase = False):
+ self._SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self._CurrentLine()[self.CurrentOffsetWithinLine: ].upper().find(String.upper())
+ else:
+ index = self._CurrentLine()[self.CurrentOffsetWithinLine: ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self._Token = self._CurrentLine()[StartPos: self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## _IsKeyword() method
+ #
+ # Check whether the input keyword is found from the current char position onward, whole word only!
+ # If found, the string value is put into self._Token
+ #
+ # @param self The object pointer
+ # @param Keyword The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def _IsKeyword(self, KeyWord, IgnoreCase = False):
+ self._SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self._CurrentLine()[self.CurrentOffsetWithinLine: ].upper().find(KeyWord.upper())
+ else:
+ index = self._CurrentLine()[self.CurrentOffsetWithinLine: ].find(KeyWord)
+ if index == 0:
+ followingChar = self._CurrentLine()[self.CurrentOffsetWithinLine + len(KeyWord)]
+ if not str(followingChar).isspace() and followingChar not in SEPARATORS:
+ return False
+ self.CurrentOffsetWithinLine += len(KeyWord)
+ self._Token = self._CurrentLine()[StartPos: self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ def _GetExpression(self):
+ Line = self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+ Index = len(Line) - 1
+ while Line[Index] in CR_LB_SET:
+ Index -= 1
+ ExpressionString = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine:Index+1]
+ self.CurrentOffsetWithinLine += len(ExpressionString)
+ ExpressionString = ExpressionString.strip()
+ return ExpressionString
+
+ ## _GetNextWord() method
+ #
+ # Get next C name from file lines
+ # If found, the string value is put into self._Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a C name string, file buffer pointer moved forward
+ # @retval False Not able to find a C name string, file buffer pointer not changed
+ #
+ def _GetNextWord(self):
+ self._SkipWhiteSpace()
+ if self._EndOfFile():
+ return False
+
+ TempChar = self._CurrentChar()
+ StartPos = self.CurrentOffsetWithinLine
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') or TempChar == '_':
+ self._GetOneChar()
+ while not self._EndOfLine():
+ TempChar = self._CurrentChar()
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') \
+ or (TempChar >= '0' and TempChar <= '9') or TempChar == '_' or TempChar == '-':
+ self._GetOneChar()
+
+ else:
+ break
+
+ self._Token = self._CurrentLine()[StartPos: self.CurrentOffsetWithinLine]
+ return True
+
+ return False
+
+ def _GetNextPcdWord(self):
+ self._SkipWhiteSpace()
+ if self._EndOfFile():
+ return False
+
+ TempChar = self._CurrentChar()
+ StartPos = self.CurrentOffsetWithinLine
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') or TempChar == '_' or TempChar == TAB_SECTION_START or TempChar == TAB_SECTION_END:
+ self._GetOneChar()
+ while not self._EndOfLine():
+ TempChar = self._CurrentChar()
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') \
+ or (TempChar >= '0' and TempChar <= '9') or TempChar == '_' or TempChar == '-' or TempChar == TAB_SECTION_START or TempChar == TAB_SECTION_END:
+ self._GetOneChar()
+
+ else:
+ break
+
+ self._Token = self._CurrentLine()[StartPos: self.CurrentOffsetWithinLine]
+ return True
+
+ return False
+
+ ## _GetNextToken() method
+ #
+ # Get next token unit before a separator
+ # If found, the string value is put into self._Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a token unit, file buffer pointer moved forward
+ # @retval False Not able to find a token unit, file buffer pointer not changed
+ #
+ def _GetNextToken(self):
+ # Skip leading spaces, if exist.
+ self._SkipWhiteSpace()
+ if self._EndOfFile():
+ return False
+ # Record the token start position, the position of the first non-space char.
+ StartPos = self.CurrentOffsetWithinLine
+ StartLine = self.CurrentLineNumber
+ while StartLine == self.CurrentLineNumber:
+ TempChar = self._CurrentChar()
+ # Try to find the end char that is not a space and not in the separator set.
+ # That is, when we get a space or any char in the set, we have reached the end of the token.
+ if not str(TempChar).isspace() and TempChar not in SEPARATORS:
+ self._GetOneChar()
+ # If we happen to meet a separator as the first char, we must proceed to get it.
+ # That is, we get a token that is a separator char. Normally it is the boundary of other tokens.
+ elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPARATORS:
+ self._GetOneChar()
+ break
+ else:
+ break
+
+ EndPos = self.CurrentOffsetWithinLine
+ if self.CurrentLineNumber != StartLine:
+ EndPos = len(self.Profile.FileLinesList[StartLine-1])
+ self._Token = self.Profile.FileLinesList[StartLine-1][StartPos: EndPos]
+ if self._Token.lower() in {TAB_IF, TAB_END_IF, TAB_ELSE_IF, TAB_ELSE, TAB_IF_DEF, TAB_IF_N_DEF, TAB_ERROR, TAB_INCLUDE}:
+ self._Token = self._Token.lower()
+ if StartPos != self.CurrentOffsetWithinLine:
+ return True
+ else:
+ return False
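+ # Illustrative tokenization of 'BaseAddress = 0xFF800000|gGuid.PcdBase':
+ # successive calls yield 'BaseAddress', '=', '0xFF800000', '|' and
+ # 'gGuid.PcdBase' ('=' and '|' are SEPARATORS, returned as single-char tokens).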
+
+ ## _GetNextGuid() method
+ #
+ # Get next token unit before a separator
+ # If found, the GUID string is put into self._Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a registry format GUID, file buffer pointer moved forward
+ # @retval False Not able to find a registry format GUID, file buffer pointer not changed
+ #
+ def _GetNextGuid(self):
+ if not self._GetNextToken():
+ return False
+ if GlobalData.gGuidPattern.match(self._Token) is not None:
+ return True
+ elif self._Token in GlobalData.gGuidDict:
+ return True
+ else:
+ self._UndoToken()
+ return False
+
+ @staticmethod
+ def _Verify(Name, Value, Scope):
+ # value verification only applies to numeric values.
+ if Scope not in TAB_PCD_NUMERIC_TYPES:
+ return
+
+ ValueNumber = 0
+ try:
+ ValueNumber = int(Value, 0)
+ except:
+ EdkLogger.error("FdfParser", FORMAT_INVALID, "The value is not a valid dec or hex number for %s." % Name)
+ if ValueNumber < 0:
+ EdkLogger.error("FdfParser", FORMAT_INVALID, "The value can't be set to a negative value for %s." % Name)
+ if ValueNumber > MAX_VAL_TYPE[Scope]:
+ EdkLogger.error("FdfParser", FORMAT_INVALID, "The value is too large for %s." % Name)
+ return True
+
+ ## _UndoToken() method
+ #
+ # Go back one token unit in file buffer
+ #
+ # @param self The object pointer
+ #
+ def _UndoToken(self):
+ self._UndoOneChar()
+ while self._CurrentChar().isspace():
+ if not self._UndoOneChar():
+ self._GetOneChar()
+ return
+
+
+ StartPos = self.CurrentOffsetWithinLine
+ CurrentLine = self.CurrentLineNumber
+ while CurrentLine == self.CurrentLineNumber:
+
+ TempChar = self._CurrentChar()
+ # Try to find the end char that is not a space and not in the separator set.
+ # That is, when we get a space or any char in the set, we have reached the end of the token.
+ if not str(TempChar).isspace() and TempChar not in SEPARATORS:
+ if not self._UndoOneChar():
+ return
+ # If we happen to meet a separator as the first char, we must proceed to get it.
+ # That is, we get a token that is a separator char. Normally it is the boundary of other tokens.
+ elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPARATORS:
+ return
+ else:
+ break
+
+ self._GetOneChar()
+
+ ## _GetNextHexNumber() method
+ #
+ # Get next HEX data before a separator
+ # If found, the HEX data is put into self._Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a HEX data, file buffer pointer moved forward
+ # @retval False Not able to find a HEX data, file buffer pointer not changed
+ #
+ def _GetNextHexNumber(self):
+ if not self._GetNextToken():
+ return False
+ if GlobalData.gHexPatternAll.match(self._Token):
+ return True
+ else:
+ self._UndoToken()
+ return False
+
+ ## _GetNextDecimalNumber() method
+ #
+ # Get next decimal data before a separator
+ # If found, the decimal data is put into self._Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a decimal data, file buffer pointer moved forward
+ # @retval False Not able to find a decimal data, file buffer pointer not changed
+ #
+ def _GetNextDecimalNumber(self):
+ if not self._GetNextToken():
+ return False
+ if self._Token.isdigit():
+ return True
+ else:
+ self._UndoToken()
+ return False
+
+ def _GetNextPcdSettings(self):
+ if not self._GetNextWord():
+ raise Warning.Expected("<PcdTokenSpaceCName>", self.FileName, self.CurrentLineNumber)
+ pcdTokenSpaceCName = self._Token
+
+ if not self._IsToken(TAB_SPLIT):
+ raise Warning.Expected(".", self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextWord():
+ raise Warning.Expected("<PcdCName>", self.FileName, self.CurrentLineNumber)
+ pcdCName = self._Token
+
+ Fields = []
+ while self._IsToken(TAB_SPLIT):
+ if not self._GetNextPcdWord():
+ raise Warning.Expected("Pcd Fields", self.FileName, self.CurrentLineNumber)
+ Fields.append(self._Token)
+
+ return (pcdCName, pcdTokenSpaceCName, TAB_SPLIT.join(Fields))
+
+ ## _GetStringData() method
+ #
+ # Get string contents quoted in "" or ''
+ # If found, the string data is put into self._Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a string data, file buffer pointer moved forward
+ # @retval False Not able to find a string data, file buffer pointer not changed
+ #
+ def _GetStringData(self):
+ QuoteToUse = None
+ if self._Token.startswith(T_CHAR_DOUBLE_QUOTE) or self._Token.startswith("L\""):
+ QuoteToUse = T_CHAR_DOUBLE_QUOTE
+ elif self._Token.startswith(T_CHAR_SINGLE_QUOTE) or self._Token.startswith("L\'"):
+ QuoteToUse = T_CHAR_SINGLE_QUOTE
+ else:
+ return False
+
+ self._UndoToken()
+ self._SkipToToken(QuoteToUse)
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self._SkipToToken(QuoteToUse):
+ raise Warning(QuoteToUse, self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning(QuoteToUse, self.FileName, self.CurrentLineNumber)
+ self._Token = self._SkippedChars.rstrip(QuoteToUse)
+ return True
+
+ ## _SkipToToken() method
+ #
+ # Search forward in file buffer for the string
+ # The skipped chars are put into self._SkippedChars
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find the string, file buffer pointer moved forward
+ # @retval False Not able to find the string, file buffer pointer not changed
+ #
+ def _SkipToToken(self, String, IgnoreCase = False):
+ StartPos = self.GetFileBufferPos()
+
+ self._SkippedChars = ""
+ while not self._EndOfFile():
+ index = -1
+ if IgnoreCase:
+ index = self._CurrentLine()[self.CurrentOffsetWithinLine: ].upper().find(String.upper())
+ else:
+ index = self._CurrentLine()[self.CurrentOffsetWithinLine: ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self._SkippedChars += String
+ return True
+ self._SkippedChars += str(self._CurrentChar())
+ self._GetOneChar()
+
+ self.SetFileBufferPos(StartPos)
+ self._SkippedChars = ""
+ return False
+
+ ## GetFileBufferPos() method
+ #
+ # Return the tuple of current line and offset within the line
+ #
+ # @param self The object pointer
+ # @retval Tuple Line number and offset pair
+ #
+ def GetFileBufferPos(self):
+ return (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ ## SetFileBufferPos() method
+ #
+ # Restore the file buffer position
+ #
+ # @param self The object pointer
+ # @param Pos The new file buffer position
+ #
+ def SetFileBufferPos(self, Pos):
+ (self.CurrentLineNumber, self.CurrentOffsetWithinLine) = Pos
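+ # Typical speculative-parsing idiom used throughout this parser (illustrative):
+ # StartPos = self.GetFileBufferPos()
+ # ...try to match something...
+ # self.SetFileBufferPos(StartPos) # rewind on failure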
+
+ ## Preprocess() method
+ #
+ # Preprocess comment, conditional directive, include directive, replace macro.
+ # Exception will be raised if syntax error found
+ #
+ # @param self The object pointer
+ #
+ def Preprocess(self):
+ self._StringToList()
+ self.PreprocessFile()
+ self.PreprocessIncludeFile()
+ self._StringToList()
+ self.PreprocessFile()
+ self.PreprocessConditionalStatement()
+ self._StringToList()
+ for Pos in self._WipeOffArea:
+ self._ReplaceFragment(Pos[0], Pos[1])
+ self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
+
+ while self._GetDefines():
+ pass
+
+ ## ParseFile() method
+ #
+ # Parse the file profile buffer to extract fd, fv ... information
+ # Exception will be raised if syntax error found
+ #
+ # @param self The object pointer
+ #
+ def ParseFile(self):
+ try:
+ self.Preprocess()
+ self._GetError()
+ #
+ # Keep processing sections of the FDF until no new sections or a syntax error is found
+ #
+ while self._GetFd() or self._GetFv() or self._GetFmp() or self._GetCapsule() or self._GetRule() or self._GetOptionRom():
+ pass
+
+ except Warning as X:
+ self._UndoToken()
+ #'\n\tGot Token: \"%s\" from File %s\n' % (self._Token, FileLineTuple[0]) + \
+ # At this point, the closest parent would be the included file itself
+ Profile = GetParentAtLine(X.OriginalLineNumber)
+ if Profile is not None:
+ X.Message += ' near line %d, column %d: %s' \
+ % (X.LineNumber, 0, Profile.FileLinesList[X.LineNumber-1])
+ else:
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ X.Message += ' near line %d, column %d: %s' \
+ % (FileLineTuple[1], self.CurrentOffsetWithinLine + 1, self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine:].rstrip(TAB_LINE_BREAK).rstrip(T_CHAR_CR))
+ raise
+
+ ## SectionParser() method
+ #
+ # Parse the file section info
+ # Exception will be raised if syntax error found
+ #
+ # @param self The object pointer
+ # @param section The section string
+
+ def SectionParser(self, section):
+ S = section.upper()
+ if not S.startswith("[DEFINES") and not S.startswith("[FD.") and not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
+ and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM.") and not S.startswith('[FMPPAYLOAD.'):
+ raise Warning("Unknown section or section appearance sequence error (The correct sequence should be [DEFINES], [FD.], [FV.], [Capsule.], [Rule.], [OptionRom.], [FMPPAYLOAD.])", self.FileName, self.CurrentLineNumber)
+
+ ## _GetDefines() method
+ #
+ # Get Defines section contents and store its data into AllMacrosList
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a Defines
+ # @retval False Not able to find a Defines
+ #
+ def _GetDefines(self):
+ if not self._GetNextToken():
+ return False
+
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[DEFINES"):
+ self.SectionParser(S)
+ self._UndoToken()
+ return False
+
+ self._UndoToken()
+ if not self._IsToken("[DEFINES", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine:], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning.Expected("[DEFINES", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ while self._GetNextWord():
+ # handle the SET statement
+ if self._Token == 'SET':
+ self._UndoToken()
+ self._GetSetStatement(None)
+ continue
+
+ Macro = self._Token
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken() or self._Token.startswith(TAB_SECTION_START):
+ raise Warning.Expected("MACRO value", self.FileName, self.CurrentLineNumber)
+ Value = self._Token
+
+ return False
+
+ ##_GetError() method
+ def _GetError(self):
+ #save the Current information
+ CurrentLine = self.CurrentLineNumber
+ CurrentOffset = self.CurrentOffsetWithinLine
+ while self._GetNextToken():
+ if self._Token == TAB_ERROR:
+ EdkLogger.error('FdfParser', ERROR_STATEMENT, self._CurrentLine().replace(TAB_ERROR, '', 1), File=self.FileName, Line=self.CurrentLineNumber)
+ self.CurrentLineNumber = CurrentLine
+ self.CurrentOffsetWithinLine = CurrentOffset
+
+ ## _GetFd() method
+ #
+ # Get FD section contents and store its data into FD dictionary of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a FD
+ # @retval False Not able to find a FD
+ #
+ def _GetFd(self):
+ if not self._GetNextToken():
+ return False
+
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[FD."):
+ if not S.startswith("[FV.") and not S.startswith('[FMPPAYLOAD.') and not S.startswith("[CAPSULE.") \
+ and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
+ raise Warning("Unknown section", self.FileName, self.CurrentLineNumber)
+ self._UndoToken()
+ return False
+
+ self._UndoToken()
+ if not self._IsToken("[FD.", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine:], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning.Expected("[FD.]", self.FileName, self.CurrentLineNumber)
+
+ FdName = self._GetUiName()
+ if FdName == "":
+ if len (self.Profile.FdDict) == 0:
+ FdName = GenFdsGlobalVariable.PlatformName
+ if FdName == "" and GlobalData.gActivePlatform:
+ FdName = GlobalData.gActivePlatform.PlatformName
+ self.Profile.FdNameNotSet = True
+ else:
+ raise Warning.Expected("FdName in [FD.] section", self.FileName, self.CurrentLineNumber)
+ self.CurrentFdName = FdName.upper()
+
+ if self.CurrentFdName in self.Profile.FdDict:
+ raise Warning("Duplicate FD name", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ FdObj = FD()
+ FdObj.FdUiName = self.CurrentFdName
+ self.Profile.FdDict[self.CurrentFdName] = FdObj
+
+ if len (self.Profile.FdDict) > 1 and self.Profile.FdNameNotSet:
+ raise Warning.Expected("all FDs to have a name", self.FileName, self.CurrentLineNumber)
+
+ Status = self._GetCreateFile(FdObj)
+ if not Status:
+ raise Warning("FD name error", self.FileName, self.CurrentLineNumber)
+
+ while self._GetTokenStatements(FdObj):
+ pass
+ for Attr in ("BaseAddress", "Size", "ErasePolarity"):
+ if getattr(FdObj, Attr) is None:
+ self._GetNextToken()
+ raise Warning("Keyword %s missing" % Attr, self.FileName, self.CurrentLineNumber)
+
+ if not FdObj.BlockSizeList:
+ FdObj.BlockSizeList.append((1, FdObj.Size, None))
+
+ self._GetDefineStatements(FdObj)
+
+ self._GetSetStatements(FdObj)
+
+ if not self._GetRegionLayout(FdObj):
+ raise Warning.Expected("region layout", self.FileName, self.CurrentLineNumber)
+
+ while self._GetRegionLayout(FdObj):
+ pass
+ return True
+
+ ## _GetUiName() method
+ #
+ # Return the UI name of a section
+ #
+ # @param self The object pointer
+ # @retval FdName UI name
+ #
+ def _GetUiName(self):
+ Name = ""
+ if self._GetNextWord():
+ Name = self._Token
+
+ return Name
+
+ ## _GetCreateFile() method
+ #
+ # Return the output file name of object
+ #
+ # @param self The object pointer
+ # @param Obj object whose data will be stored in file
+ # @retval FdName UI name
+ #
+ def _GetCreateFile(self, Obj):
+ if self._IsKeyword("CREATE_FILE"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("file name", self.FileName, self.CurrentLineNumber)
+
+ FileName = self._Token
+ Obj.CreateFileName = FileName
+
+ return True
+
+ def SetPcdLocalation(self, pcdpair):
+ self.Profile.PcdLocalDict[pcdpair] = (self.Profile.FileName, self.CurrentLineNumber)
+
+ ## _GetTokenStatements() method
+ #
+ # Get token statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom token statement is got
+ #
+ def _GetTokenStatements(self, Obj):
+ if self._IsKeyword("BaseAddress"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex base address", self.FileName, self.CurrentLineNumber)
+
+ Obj.BaseAddress = self._Token
+
+ if self._IsToken(TAB_VALUE_SPLIT):
+ pcdPair = self._GetNextPcdSettings()
+ Obj.BaseAddressPcd = pcdPair
+ self.Profile.PcdDict[pcdPair] = Obj.BaseAddress
+ self.SetPcdLocalation(pcdPair)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple
+ return True
+
+ if self._IsKeyword("Size"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex size", self.FileName, self.CurrentLineNumber)
+
+ Size = self._Token
+ if self._IsToken(TAB_VALUE_SPLIT):
+ pcdPair = self._GetNextPcdSettings()
+ Obj.SizePcd = pcdPair
+ self.Profile.PcdDict[pcdPair] = Size
+ self.SetPcdLocalation(pcdPair)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple
+ Obj.Size = int(Size, 0)
+ return True
+
+ if self._IsKeyword("ErasePolarity"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("Erase Polarity", self.FileName, self.CurrentLineNumber)
+
+ if self._Token not in {"1", "0"}:
+ raise Warning.Expected("1 or 0 Erase Polarity", self.FileName, self.CurrentLineNumber)
+
+ Obj.ErasePolarity = self._Token
+ return True
+
+ return self._GetBlockStatements(Obj)
+
+ ## _GetAddressStatements() method
+ #
+ # Get address statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom address statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def _GetAddressStatements(self, Obj):
+ if self._IsKeyword("BsBaseAddress"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextDecimalNumber() and not self._GetNextHexNumber():
+ raise Warning.Expected("address", self.FileName, self.CurrentLineNumber)
+
+ BsAddress = int(self._Token, 0)
+ Obj.BsBaseAddress = BsAddress
+
+ if self._IsKeyword("RtBaseAddress"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextDecimalNumber() and not self._GetNextHexNumber():
+ raise Warning.Expected("address", self.FileName, self.CurrentLineNumber)
+
+ RtAddress = int(self._Token, 0)
+ Obj.RtBaseAddress = RtAddress
+
+ ## _GetBlockStatements() method
+ #
+ # Get block statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ #
+ def _GetBlockStatements(self, Obj):
+ IsBlock = False
+ while self._GetBlockStatement(Obj):
+ IsBlock = True
+
+ Item = Obj.BlockSizeList[-1]
+ if Item[0] is None or Item[1] is None:
+ raise Warning.Expected("block statement", self.FileName, self.CurrentLineNumber)
+ return IsBlock
+
+ ## _GetBlockStatement() method
+ #
+ # Get block statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def _GetBlockStatement(self, Obj):
+ if not self._IsKeyword("BlockSize"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber() and not self._GetNextDecimalNumber():
+ raise Warning.Expected("Hex or Integer block size", self.FileName, self.CurrentLineNumber)
+
+ BlockSize = self._Token
+ BlockSizePcd = None
+ if self._IsToken(TAB_VALUE_SPLIT):
+ PcdPair = self._GetNextPcdSettings()
+ BlockSizePcd = PcdPair
+ self.Profile.PcdDict[PcdPair] = BlockSize
+ self.SetPcdLocalation(PcdPair)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple
+ BlockSize = int(BlockSize, 0)
+
+ BlockNumber = None
+ if self._IsKeyword("NumBlocks"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextDecimalNumber() and not self._GetNextHexNumber():
+ raise Warning.Expected("block numbers", self.FileName, self.CurrentLineNumber)
+
+ BlockNumber = int(self._Token, 0)
+
+ Obj.BlockSizeList.append((BlockSize, BlockNumber, BlockSizePcd))
+ return True
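+ # Illustrative FD statements accepted here (the PCD name is made up):
+ # BlockSize = 0x10000|gTokenSpaceGuid.PcdFlashBlockSize
+ # NumBlocks = 0x400
+ # Each pair adds a (BlockSize, BlockNumber, BlockSizePcd) entry to BlockSizeList.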
+
+ ## _GetDefineStatements() method
+ #
+ # Get define statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def _GetDefineStatements(self, Obj):
+ while self._GetDefineStatement(Obj):
+ pass
+
+ ## _GetDefineStatement() method
+ #
+ # Get define statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def _GetDefineStatement(self, Obj):
+ if self._IsKeyword(TAB_DEFINE):
+ self._GetNextToken()
+ Macro = self._Token
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("value", self.FileName, self.CurrentLineNumber)
+
+ Value = self._Token
+ Macro = '$(' + Macro + ')'
+ Obj.DefineVarDict[Macro] = Value
+ return True
+
+ return False
+
+ ## _GetSetStatements() method
+ #
+ # Get set statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def _GetSetStatements(self, Obj):
+ while self._GetSetStatement(Obj):
+ pass
+
+ ## _GetSetStatement() method
+ #
+ # Get set statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def _GetSetStatement(self, Obj):
+ if self._IsKeyword("SET"):
+ PcdPair = self._GetNextPcdSettings()
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ Value = self._GetExpression()
+ Value = self._EvaluateConditional(Value, self.CurrentLineNumber, 'eval', True)
+
+ if Obj:
+ Obj.SetVarDict[PcdPair] = Value
+ self.Profile.PcdDict[PcdPair] = Value
+ self.SetPcdLocalation(PcdPair)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple
+ return True
+
+ return False
+
+ ## _CalcRegionExpr(self)
+ #
+ # Calculate expression for offset or size of a region
+ #
+ # @return: None if invalid expression
+ # Calculated number if successfully
+ #
+ def _CalcRegionExpr(self):
+ StartPos = self.GetFileBufferPos()
+ Expr = ''
+ PairCount = 0
+ while not self._EndOfFile():
+ CurCh = self._CurrentChar()
+ if CurCh == '(':
+ PairCount += 1
+ elif CurCh == ')':
+ PairCount -= 1
+
+ if CurCh in '|\r\n' and PairCount == 0:
+ break
+ Expr += CurCh
+ self._GetOneChar()
+ try:
+ return int(
+ ValueExpression(Expr,
+ self._CollectMacroPcd()
+ )(True), 0)
+ except Exception:
+ self.SetFileBufferPos(StartPos)
+ return None
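+ # Illustrative: for a region line '0x100000 + 0x1000|0x40000' the offset
+ # expression '0x100000 + 0x1000' is collected up to the unparenthesized '|'
+ # and evaluates to 0x101000; on any evaluation failure the buffer position
+ # is restored and None is returned.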
+
+ ## _GetRegionLayout() method
+ #
+ # Get region layout for FD
+ #
+ # @param self The object pointer
+ # @param theFd for whom region is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
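+ # Illustrative region layouts this method parses (PCD names are made up):
+ # 0x00000000|0x00040000
+ # gTokenSpaceGuid.PcdFlashFvBase|gTokenSpaceGuid.PcdFlashFvSize
+ # FV = FVMAIN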
+ def _GetRegionLayout(self, theFd):
+ Offset = self._CalcRegionExpr()
+ if Offset is None:
+ return False
+
+ RegionObj = Region()
+ RegionObj.Offset = Offset
+ theFd.RegionList.append(RegionObj)
+
+ if not self._IsToken(TAB_VALUE_SPLIT):
+ raise Warning.Expected("'|'", self.FileName, self.CurrentLineNumber)
+
+ Size = self._CalcRegionExpr()
+ if Size is None:
+ raise Warning.Expected("Region Size", self.FileName, self.CurrentLineNumber)
+ RegionObj.Size = Size
+
+ if not self._GetNextWord():
+ return True
+
+ if self._Token not in {"SET", BINARY_FILE_TYPE_FV, "FILE", "DATA", "CAPSULE", "INF"}:
+ #
+ # If next token is a word which is not a valid FV type, it might be part of [PcdOffset[|PcdSize]]
+ # Or it might be next region's offset described by an expression which starts with a PCD.
+ # PcdOffset[|PcdSize] or OffsetPcdExpression|Size
+ #
+ self._UndoToken()
+ IsRegionPcd = (RegionSizeGuidPattern.match(self._CurrentLine()[self.CurrentOffsetWithinLine:]) or
+ RegionOffsetPcdPattern.match(self._CurrentLine()[self.CurrentOffsetWithinLine:]))
+ if IsRegionPcd:
+ RegionObj.PcdOffset = self._GetNextPcdSettings()
+ self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + int(theFd.BaseAddress, 0))
+ self.SetPcdLocalation(RegionObj.PcdOffset)
+ self._PcdDict['%s.%s' % (RegionObj.PcdOffset[1], RegionObj.PcdOffset[0])] = "0x%x" % RegionObj.Offset
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[RegionObj.PcdOffset] = FileLineTuple
+ if self._IsToken(TAB_VALUE_SPLIT):
+ RegionObj.PcdSize = self._GetNextPcdSettings()
+ self.Profile.PcdDict[RegionObj.PcdSize] = "0x%08X" % RegionObj.Size
+ self.SetPcdLocalation(RegionObj.PcdSize)
+ self._PcdDict['%s.%s' % (RegionObj.PcdSize[1], RegionObj.PcdSize[0])] = "0x%x" % RegionObj.Size
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[RegionObj.PcdSize] = FileLineTuple
+
+ if not self._GetNextWord():
+ return True
+
+ if self._Token == "SET":
+ self._UndoToken()
+ self._GetSetStatements(RegionObj)
+ if not self._GetNextWord():
+ return True
+
+ elif self._Token == BINARY_FILE_TYPE_FV:
+ self._UndoToken()
+ self._GetRegionFvType(RegionObj)
+
+ elif self._Token == "CAPSULE":
+ self._UndoToken()
+ self._GetRegionCapType(RegionObj)
+
+ elif self._Token == "FILE":
+ self._UndoToken()
+ self._GetRegionFileType(RegionObj)
+
+ elif self._Token == "INF":
+ self._UndoToken()
+ RegionObj.RegionType = "INF"
+ while self._IsKeyword("INF"):
+ self._UndoToken()
+ ffsInf = self._ParseInfStatement()
+ if not ffsInf:
+ break
+ RegionObj.RegionDataList.append(ffsInf)
+
+ elif self._Token == "DATA":
+ self._UndoToken()
+ self._GetRegionDataType(RegionObj)
+ else:
+ self._UndoToken()
+ if self._GetRegionLayout(theFd):
+ return True
+ raise Warning("A valid region type was not found. "
+ "Valid types are [SET, FV, CAPSULE, FILE, DATA, INF].",
+ self.FileName, self.CurrentLineNumber)
+
+ return True
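+
+ # Illustrative note (not part of the original source): a typical FD region
+ # entry handled above might look like (PCD and FV names invented):
+ #   0x00100000|0x00200000
+ #   gTokenSpaceGuid.PcdFlashFvMainBase|gTokenSpaceGuid.PcdFlashFvMainSize
+ #   FV = FVMAIN
+ # The offset/size PCDs, when present, are recorded in the profile with the
+ # computed absolute base and size values.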
+
+ ## _GetRegionFvType() method
+ #
+ # Get region fv data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def _GetRegionFvType(self, RegionObj):
+ if not self._IsKeyword(BINARY_FILE_TYPE_FV):
+ raise Warning.Expected("'FV'", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FV name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = BINARY_FILE_TYPE_FV
+ RegionObj.RegionDataList.append((self._Token).upper())
+
+ while self._IsKeyword(BINARY_FILE_TYPE_FV):
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FV name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append((self._Token).upper())
+
+ ## _GetRegionCapType() method
+ #
+ # Get region capsule data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def _GetRegionCapType(self, RegionObj):
+ if not self._IsKeyword("CAPSULE"):
+ raise Warning.Expected("'CAPSULE'", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("CAPSULE name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "CAPSULE"
+ RegionObj.RegionDataList.append(self._Token)
+
+ while self._IsKeyword("CAPSULE"):
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("CAPSULE name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self._Token)
+
+ ## _GetRegionFileType() method
+ #
+ # Get region file data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def _GetRegionFileType(self, RegionObj):
+ if not self._IsKeyword("FILE"):
+ raise Warning.Expected("'FILE'", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("File name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "FILE"
+ RegionObj.RegionDataList.append(self._Token)
+
+ while self._IsKeyword("FILE"):
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FILE name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self._Token)
+
+ ## _GetRegionDataType() method
+ #
+ # Get region array data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def _GetRegionDataType(self, RegionObj):
+ if not self._IsKeyword("DATA"):
+ raise Warning.Expected("Region Data type", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex byte", self.FileName, self.CurrentLineNumber)
+
+ if len(self._Token) > 18:
+ raise Warning("Hex string can't be converted to a valid UINT64 value", self.FileName, self.CurrentLineNumber)
+
+ # convert hex string value to byte hex string array
+ AllString = self._Token
+ AllStrLen = len (AllString)
+ DataString = ""
+ while AllStrLen > 4:
+ DataString = DataString + "0x" + AllString[AllStrLen - 2: AllStrLen] + TAB_COMMA_SPLIT
+ AllStrLen = AllStrLen - 2
+ DataString = DataString + AllString[:AllStrLen] + TAB_COMMA_SPLIT
+
+ # byte value array
+ if len (self._Token) <= 4:
+ while self._IsToken(TAB_COMMA_SPLIT):
+ if not self._GetNextHexNumber():
+ raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
+ if len(self._Token) > 4:
+ raise Warning("Hex byte (must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self._Token
+ DataString += TAB_COMMA_SPLIT
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(TAB_COMMA_SPLIT)
+ RegionObj.RegionType = "DATA"
+ RegionObj.RegionDataList.append(DataString)
+
+ while self._IsKeyword("DATA"):
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex byte", self.FileName, self.CurrentLineNumber)
+
+ if len(self._Token) > 18:
+ raise Warning("Hex string can't be converted to a valid UINT64 value", self.FileName, self.CurrentLineNumber)
+
+ # convert hex string value to byte hex string array
+ AllString = self._Token
+ AllStrLen = len (AllString)
+ DataString = ""
+ while AllStrLen > 4:
+ DataString = DataString + "0x" + AllString[AllStrLen - 2: AllStrLen] + TAB_COMMA_SPLIT
+ AllStrLen = AllStrLen - 2
+ DataString = DataString + AllString[:AllStrLen] + TAB_COMMA_SPLIT
+
+ # byte value array
+ if len (self._Token) <= 4:
+ while self._IsToken(TAB_COMMA_SPLIT):
+ if not self._GetNextHexNumber():
+ raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
+ if len(self._Token) > 4:
+ raise Warning("Hex byte (must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self._Token
+ DataString += TAB_COMMA_SPLIT
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(TAB_COMMA_SPLIT)
+ RegionObj.RegionDataList.append(DataString)
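+
+ # Illustrative note (not part of the original source): the hex-to-byte
+ # conversion above emits little-endian byte strings. For a hypothetical
+ # "DATA = {0x0102}", AllString is "0x0102", the loop peels "02" off the
+ # end first, and DataString becomes "0x02,0x01".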
+
+ ## _GetFv() method
+ #
+ # Get FV section contents and store its data into FV dictionary of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a FV
+ # @retval False Not able to find a FV
+ #
+ def _GetFv(self):
+ if not self._GetNextToken():
+ return False
+
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[FV."):
+ self.SectionParser(S)
+ self._UndoToken()
+ return False
+
+ self._UndoToken()
+ if not self._IsToken("[FV.", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine:], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning("Unknown Keyword '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+
+ FvName = self._GetUiName()
+ self.CurrentFvName = FvName.upper()
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ FvObj = FV(Name=self.CurrentFvName)
+ self.Profile.FvDict[self.CurrentFvName] = FvObj
+
+ Status = self._GetCreateFile(FvObj)
+ if not Status:
+ raise Warning("FV name error", self.FileName, self.CurrentLineNumber)
+
+ self._GetDefineStatements(FvObj)
+
+ self._GetAddressStatements(FvObj)
+
+ while True:
+ self._GetSetStatements(FvObj)
+
+ if not (self._GetBlockStatement(FvObj) or self._GetFvBaseAddress(FvObj) or
+ self._GetFvForceRebase(FvObj) or self._GetFvAlignment(FvObj) or
+ self._GetFvAttributes(FvObj) or self._GetFvNameGuid(FvObj) or
+ self._GetFvExtEntryStatement(FvObj) or self._GetFvNameString(FvObj)):
+ break
+
+ if FvObj.FvNameString == 'TRUE' and not FvObj.FvNameGuid:
+ raise Warning("FvNameString found but FvNameGuid was not found", self.FileName, self.CurrentLineNumber)
+
+ # An FV may contain up to two apriori sections (one PEI and one DXE),
+ # so attempt the parse twice.
+ self._GetAprioriSection(FvObj)
+ self._GetAprioriSection(FvObj)
+
+ while True:
+ isInf = self._GetInfStatement(FvObj)
+ isFile = self._GetFileStatement(FvObj)
+ if not isInf and not isFile:
+ break
+
+ return True
+
+ ## _GetFvAlignment() method
+ #
+ # Get alignment for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom alignment is got
+ # @retval True Successfully find an alignment statement
+ # @retval False Not able to find an alignment statement
+ #
+ def _GetFvAlignment(self, Obj):
+ if not self._IsKeyword("FvAlignment"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("alignment value", self.FileName, self.CurrentLineNumber)
+
+ if self._Token.upper() not in {"1", "2", "4", "8", "16", "32", "64", "128", "256", "512", \
+ "1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K", \
+ "1M", "2M", "4M", "8M", "16M", "32M", "64M", "128M", "256M", "512M", \
+ "1G", "2G"}:
+ raise Warning("Unknown alignment value '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ Obj.FvAlignment = self._Token
+ return True
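+
+ # Illustrative note (not part of the original source): a hypothetical
+ #   FvAlignment = 16
+ # inside an [FV.] section is accepted as-is, since "16" is one of the
+ # allowed power-of-two values between 1 and 2G listed above.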
+
+ ## _GetFvBaseAddress() method
+ #
+ # Get BaseAddress for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom FvBaseAddress is got
+ # @retval True Successfully find a FvBaseAddress statement
+ # @retval False Not able to find a FvBaseAddress statement
+ #
+ def _GetFvBaseAddress(self, Obj):
+ if not self._IsKeyword("FvBaseAddress"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FV base address value", self.FileName, self.CurrentLineNumber)
+
+ if not BaseAddrValuePattern.match(self._Token.upper()):
+ raise Warning("Unknown FV base address value '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ Obj.FvBaseAddress = self._Token
+ return True
+
+ ## _GetFvForceRebase() method
+ #
+ # Get FvForceRebase for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom FvForceRebase is got
+ # @retval True Successfully find a FvForceRebase statement
+ # @retval False Not able to find a FvForceRebase statement
+ #
+ def _GetFvForceRebase(self, Obj):
+ if not self._IsKeyword("FvForceRebase"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FvForceRebase value", self.FileName, self.CurrentLineNumber)
+
+ if self._Token.upper() not in {"TRUE", "FALSE", "0", "0X0", "0X00", "1", "0X1", "0X01"}:
+ raise Warning("Unknown FvForceRebase value '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+
+ if self._Token.upper() in {"TRUE", "1", "0X1", "0X01"}:
+ Obj.FvForceRebase = True
+ elif self._Token.upper() in {"FALSE", "0", "0X0", "0X00"}:
+ Obj.FvForceRebase = False
+ else:
+ Obj.FvForceRebase = None
+
+ return True
+
+
+ ## _GetFvAttributes() method
+ #
+ # Get attributes for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom attribute is got
+ # @retval True Found at least one FV attribute
+ # @retval False No FV attribute statement found
+ #
+ def _GetFvAttributes(self, FvObj):
+ IsWordToken = False
+ while self._GetNextWord():
+ IsWordToken = True
+ name = self._Token
+ if name not in {"ERASE_POLARITY", "MEMORY_MAPPED", \
+ "STICKY_WRITE", "LOCK_CAP", "LOCK_STATUS", "WRITE_ENABLED_CAP", \
+ "WRITE_DISABLED_CAP", "WRITE_STATUS", "READ_ENABLED_CAP", \
+ "READ_DISABLED_CAP", "READ_STATUS", "READ_LOCK_CAP", \
+ "READ_LOCK_STATUS", "WRITE_LOCK_CAP", "WRITE_LOCK_STATUS", \
+ "WRITE_POLICY_RELIABLE", "WEAK_ALIGNMENT", "FvUsedSizeEnable"}:
+ self._UndoToken()
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken() or self._Token.upper() not in {"TRUE", "FALSE", "1", "0"}:
+ raise Warning.Expected("TRUE/FALSE (1/0)", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvAttributeDict[name] = self._Token
+
+ return IsWordToken
+
+ ## _GetFvNameGuid() method
+ #
+ # Get FV GUID for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom GUID is got
+ # @retval True Successfully find FvNameGuid
+ # @retval False Not able to find FvNameGuid
+ #
+ def _GetFvNameGuid(self, FvObj):
+ if not self._IsKeyword("FvNameGuid"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextGuid():
+ raise Warning.Expected("GUID value", self.FileName, self.CurrentLineNumber)
+ if self._Token in GlobalData.gGuidDict:
+ self._Token = GuidStructureStringToGuidString(GlobalData.gGuidDict[self._Token]).upper()
+
+ FvObj.FvNameGuid = self._Token
+
+ return True
+
+ def _GetFvNameString(self, FvObj):
+ if not self._IsKeyword("FvNameString"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken() or self._Token.upper() not in {'TRUE', 'FALSE'}:
+ raise Warning.Expected("TRUE or FALSE for FvNameString", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvNameString = self._Token
+
+ return True
+
+ def _GetFvExtEntryStatement(self, FvObj):
+ if not (self._IsKeyword("FV_EXT_ENTRY") or self._IsKeyword("FV_EXT_ENTRY_TYPE")):
+ return False
+
+ if not self._IsKeyword ("TYPE"):
+ raise Warning.Expected("'TYPE'", self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber() and not self._GetNextDecimalNumber():
+ raise Warning.Expected("Hex FV extension entry type value", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvExtEntryTypeValue.append(self._Token)
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ if not self._IsKeyword("FILE") and not self._IsKeyword("DATA"):
+ raise Warning.Expected("'FILE' or 'DATA'", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvExtEntryType.append(self._Token)
+
+ if self._Token == 'DATA':
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex byte", self.FileName, self.CurrentLineNumber)
+
+ if len(self._Token) > 4:
+ raise Warning("Hex byte (must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+
+ DataString = self._Token
+ DataString += TAB_COMMA_SPLIT
+
+ while self._IsToken(TAB_COMMA_SPLIT):
+ if not self._GetNextHexNumber():
+ raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
+ if len(self._Token) > 4:
+ raise Warning("Hex byte (must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self._Token
+ DataString += TAB_COMMA_SPLIT
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(TAB_COMMA_SPLIT)
+ FvObj.FvExtEntryData.append(DataString)
+
+ if self._Token == 'FILE':
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FV Extension Entry file path", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvExtEntryData.append(self._Token)
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ return True
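+
+ # Illustrative note (not part of the original source): hypothetical FV
+ # extension entries handled above (type values and paths invented):
+ #   FV_EXT_ENTRY TYPE = 0x01 { DATA = {0x11, 0x22} }
+ #   FV_EXT_ENTRY TYPE = 0x02 { FILE = SomeDir/ExtEntry.bin }
+ # DATA bytes are collected into a comma-separated string; FILE stores the
+ # given path in FvObj.FvExtEntryData.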
+
+ ## _GetAprioriSection() method
+ #
+ # Get the apriori section contents for FV
+ #
+ # @param self The object pointer
+ # @param FvObj for whom apriori is got
+ # @retval True Successfully find apriori statement
+ # @retval False Not able to find apriori statement
+ #
+ def _GetAprioriSection(self, FvObj):
+ if not self._IsKeyword("APRIORI"):
+ return False
+
+ if not self._IsKeyword("PEI") and not self._IsKeyword("DXE"):
+ raise Warning.Expected("Apriori file type", self.FileName, self.CurrentLineNumber)
+ AprType = self._Token
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ AprSectionObj = AprioriSection()
+ AprSectionObj.AprioriType = AprType
+
+ self._GetDefineStatements(AprSectionObj)
+
+ while True:
+ IsInf = self._GetInfStatement(AprSectionObj)
+ IsFile = self._GetFileStatement(AprSectionObj)
+ if not IsInf and not IsFile:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ FvObj.AprioriSectionList.append(AprSectionObj)
+ return True
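+
+ # Illustrative note (not part of the original source): a hypothetical
+ # apriori section parsed above:
+ #   APRIORI PEI {
+ #     INF MdeModulePkg/Universal/PCD/Pei/Pcd.inf
+ #   }
+ # Each INF or FILE statement inside the braces is collected on the
+ # AprioriSection object before it is added to FvObj.AprioriSectionList.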
+
+ def _ParseInfStatement(self):
+ if not self._IsKeyword("INF"):
+ return None
+
+ ffsInf = FfsInfStatement()
+ self._GetInfOptions(ffsInf)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("INF file path", self.FileName, self.CurrentLineNumber)
+ ffsInf.InfFileName = self._Token
+ if not ffsInf.InfFileName.endswith('.inf'):
+ raise Warning.Expected(".inf file path", self.FileName, self.CurrentLineNumber)
+
+ ffsInf.CurrentLineNum = self.CurrentLineNumber
+ ffsInf.CurrentLineContent = self._CurrentLine()
+
+ # Replace $(SPACE) with a real space
+ ffsInf.InfFileName = ffsInf.InfFileName.replace('$(SPACE)', ' ')
+
+ if ffsInf.InfFileName.replace(TAB_WORKSPACE, '').find('$') == -1:
+ #do case sensitive check for file path
+ ErrorCode, ErrorInfo = PathClass(NormPath(ffsInf.InfFileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
+ if ErrorCode != 0:
+ EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
+
+ NewFileName = ffsInf.InfFileName
+ if ffsInf.OverrideGuid:
+ NewFileName = ProcessDuplicatedInf(PathClass(ffsInf.InfFileName,GenFdsGlobalVariable.WorkSpaceDir), ffsInf.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir).Path
+
+ if not NewFileName in self.Profile.InfList:
+ self.Profile.InfList.append(NewFileName)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.InfFileLineList.append(FileLineTuple)
+ if ffsInf.UseArch:
+ if ffsInf.UseArch not in self.Profile.InfDict:
+ self.Profile.InfDict[ffsInf.UseArch] = [ffsInf.InfFileName]
+ else:
+ self.Profile.InfDict[ffsInf.UseArch].append(ffsInf.InfFileName)
+ else:
+ self.Profile.InfDict['ArchTBD'].append(ffsInf.InfFileName)
+
+ if self._IsToken(TAB_VALUE_SPLIT):
+ if self._IsKeyword('RELOCS_STRIPPED'):
+ ffsInf.KeepReloc = False
+ elif self._IsKeyword('RELOCS_RETAINED'):
+ ffsInf.KeepReloc = True
+ else:
+ raise Warning("Unknown reloc strip flag '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ return ffsInf
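+
+ # Illustrative note (not part of the original source): a hypothetical INF
+ # statement parsed above (rule, arch, and path invented):
+ #   INF RuleOverride = ACPITABLE USE = X64 SomePkg/SomeModule.inf | RELOCS_STRIPPED
+ # Options before the path populate the FfsInfStatement fields via
+ # _GetInfOptions; the optional '|' flag controls KeepReloc.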
+
+ ## _GetInfStatement() method
+ #
+ # Get INF statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom inf statement is got
+ # @retval True Successfully find inf statement
+ # @retval False Not able to find inf statement
+ #
+ def _GetInfStatement(self, Obj, ForCapsule=False):
+ ffsInf = self._ParseInfStatement()
+ if not ffsInf:
+ return False
+
+ if ForCapsule:
+ myCapsuleFfs = CapsuleFfs()
+ myCapsuleFfs.Ffs = ffsInf
+ Obj.CapsuleDataList.append(myCapsuleFfs)
+ else:
+ Obj.FfsList.append(ffsInf)
+ return True
+
+ ## _GetInfOptions() method
+ #
+ # Get options for INF
+ #
+ # @param self The object pointer
+ # @param FfsInfObj for whom option is got
+ #
+ def _GetInfOptions(self, FfsInfObj):
+ if self._IsKeyword("FILE_GUID"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextGuid():
+ raise Warning.Expected("GUID value", self.FileName, self.CurrentLineNumber)
+ if self._Token in GlobalData.gGuidDict:
+ self._Token = GuidStructureStringToGuidString(GlobalData.gGuidDict[self._Token]).upper()
+ FfsInfObj.OverrideGuid = self._Token
+
+ if self._IsKeyword("RuleOverride"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("Rule name", self.FileName, self.CurrentLineNumber)
+ FfsInfObj.Rule = self._Token
+
+ if self._IsKeyword("VERSION"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("Version", self.FileName, self.CurrentLineNumber)
+
+ if self._GetStringData():
+ FfsInfObj.Version = self._Token
+
+ if self._IsKeyword(BINARY_FILE_TYPE_UI):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("UI name", self.FileName, self.CurrentLineNumber)
+
+ if self._GetStringData():
+ FfsInfObj.Ui = self._Token
+
+ if self._IsKeyword("USE"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("ARCH name", self.FileName, self.CurrentLineNumber)
+ FfsInfObj.UseArch = self._Token
+
+
+ if self._GetNextToken():
+ p = compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\))')
+ if p.match(self._Token) and p.match(self._Token).span()[1] == len(self._Token):
+ FfsInfObj.KeyStringList.append(self._Token)
+ if not self._IsToken(TAB_COMMA_SPLIT):
+ return
+ else:
+ self._UndoToken()
+ return
+
+ while self._GetNextToken():
+ if not p.match(self._Token):
+ raise Warning.Expected("KeyString \"Target_Tag_Arch\"", self.FileName, self.CurrentLineNumber)
+ FfsInfObj.KeyStringList.append(self._Token)
+
+ if not self._IsToken(TAB_COMMA_SPLIT):
+ break
+
+ ## _GetFileStatement() method
+ #
+ # Get FILE statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom FILE statement is got
+ # @retval True Successfully find FILE statement
+ # @retval False Not able to find FILE statement
+ #
+ def _GetFileStatement(self, Obj, ForCapsule = False):
+ if not self._IsKeyword("FILE"):
+ return False
+
+ if not self._GetNextWord():
+ raise Warning.Expected("FFS type", self.FileName, self.CurrentLineNumber)
+
+ if ForCapsule and self._Token == 'DATA':
+ self._UndoToken()
+ self._UndoToken()
+ return False
+
+ FfsFileObj = FileStatement()
+ FfsFileObj.FvFileType = self._Token
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextGuid():
+ if not self._GetNextWord():
+ raise Warning.Expected("File GUID", self.FileName, self.CurrentLineNumber)
+ if self._Token == 'PCD':
+ if not self._IsToken("("):
+ raise Warning.Expected("'('", self.FileName, self.CurrentLineNumber)
+ PcdPair = self._GetNextPcdSettings()
+ if not self._IsToken(")"):
+ raise Warning.Expected("')'", self.FileName, self.CurrentLineNumber)
+ self._Token = 'PCD('+PcdPair[1]+TAB_SPLIT+PcdPair[0]+')'
+
+ if self._Token in GlobalData.gGuidDict:
+ self._Token = GuidStructureStringToGuidString(GlobalData.gGuidDict[self._Token]).upper()
+ FfsFileObj.NameGuid = self._Token
+
+ self._GetFilePart(FfsFileObj)
+
+ if ForCapsule:
+ capsuleFfs = CapsuleFfs()
+ capsuleFfs.Ffs = FfsFileObj
+ Obj.CapsuleDataList.append(capsuleFfs)
+ else:
+ Obj.FfsList.append(FfsFileObj)
+
+ return True
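+
+ # Illustrative note (not part of the original source): a hypothetical FILE
+ # statement parsed above (PCD and path invented):
+ #   FILE FREEFORM = PCD(gTokenSpaceGuid.PcdSomeFileGuid) {
+ #     SECTION RAW = SomeDir/Payload.bin
+ #   }
+ # The FFS type and name GUID (possibly resolved through a PCD) are stored
+ # on a FileStatement object, then _GetFilePart consumes the braced body.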
+
+ ## _FileCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a file type.
+ #
+ # @param FileType The file type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+ @staticmethod
+ def _FileCouldHaveRelocFlag (FileType):
+ return FileType in {SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_MM_CORE_STANDALONE, 'PEI_DXE_COMBO'}
+
+ ## _SectionCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a section type.
+ #
+ # @param SectionType The section type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+ @staticmethod
+ def _SectionCouldHaveRelocFlag (SectionType):
+ return SectionType in {BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32}
+
+ ## _GetFilePart() method
+ #
+ # Get components for FILE statement
+ #
+ # @param self The object pointer
+ # @param FfsFileObj for whom component is got
+ #
+ def _GetFilePart(self, FfsFileObj):
+ self._GetFileOpts(FfsFileObj)
+
+ if not self._IsToken("{"):
+ if self._IsKeyword('RELOCS_STRIPPED') or self._IsKeyword('RELOCS_RETAINED'):
+ if self._FileCouldHaveRelocFlag(FfsFileObj.FvFileType):
+ if self._Token == 'RELOCS_STRIPPED':
+ FfsFileObj.KeepReloc = False
+ else:
+ FfsFileObj.KeepReloc = True
+ else:
+ raise Warning("File type %s could not have reloc strip flag at line %d" % (FfsFileObj.FvFileType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("File name or section data", self.FileName, self.CurrentLineNumber)
+
+ if self._Token == BINARY_FILE_TYPE_FV:
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("FV name", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FvName = self._Token
+
+ elif self._Token == "FD":
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("FD name", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FdName = self._Token
+
+ elif self._Token in {TAB_DEFINE, "APRIORI", "SECTION"}:
+ self._UndoToken()
+ self._GetSectionData(FfsFileObj)
+
+ elif hasattr(FfsFileObj, 'FvFileType') and FfsFileObj.FvFileType == 'RAW':
+ self._UndoToken()
+ self._GetRAWData(FfsFileObj)
+
+ else:
+ FfsFileObj.CurrentLineNum = self.CurrentLineNumber
+ FfsFileObj.CurrentLineContent = self._CurrentLine()
+ FfsFileObj.FileName = self._Token.replace('$(SPACE)', ' ')
+ self._VerifyFile(FfsFileObj.FileName)
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ ## _GetRAWData() method
+ #
+ # Get RAW data for FILE statement
+ #
+ # @param self The object pointer
+ # @param FfsFileObj for whom section is got
+ #
+ def _GetRAWData(self, FfsFileObj):
+ FfsFileObj.FileName = []
+ FfsFileObj.SubAlignment = []
+ while True:
+ AlignValue = None
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENTS:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ # For FFS, "Auto" is the default option and is equivalent to ""
+ if not self._Token == "Auto":
+ AlignValue = self._Token
+ if not self._GetNextToken():
+ raise Warning.Expected("Filename value", self.FileName, self.CurrentLineNumber)
+
+ FileName = self._Token.replace('$(SPACE)', ' ')
+ if FileName == T_CHAR_BRACE_R:
+ self._UndoToken()
+ raise Warning.Expected("Filename value", self.FileName, self.CurrentLineNumber)
+
+ self._VerifyFile(FileName)
+ File = PathClass(NormPath(FileName), GenFdsGlobalVariable.WorkSpaceDir)
+ FfsFileObj.FileName.append(File.Path)
+ FfsFileObj.SubAlignment.append(AlignValue)
+
+ if self._IsToken(T_CHAR_BRACE_R):
+ self._UndoToken()
+ break
+
+ if len(FfsFileObj.SubAlignment) == 1:
+ FfsFileObj.SubAlignment = FfsFileObj.SubAlignment[0]
+ if len(FfsFileObj.FileName) == 1:
+ FfsFileObj.FileName = FfsFileObj.FileName[0]
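+
+ # Illustrative note (not part of the original source): a hypothetical RAW
+ # file body parsed above (GUID and file names invented):
+ #   FILE RAW = 11111111-2222-3333-4444-555555555555 {
+ #     Align=16 First.bin
+ #     Second.bin
+ #   }
+ # yields parallel FileName/SubAlignment lists; with a single entry both
+ # collapse to scalars, as in the two checks above.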
+
+ ## _GetFileOpts() method
+ #
+ # Get options for FILE statement
+ #
+ # @param self The object pointer
+ # @param FfsFileObj for whom options is got
+ #
+ def _GetFileOpts(self, FfsFileObj):
+ if self._GetNextToken():
+ if TokenFindPattern.match(self._Token):
+ FfsFileObj.KeyStringList.append(self._Token)
+ if self._IsToken(TAB_COMMA_SPLIT):
+ while self._GetNextToken():
+ if not TokenFindPattern.match(self._Token):
+ raise Warning.Expected("KeyString \"Target_Tag_Arch\"", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.KeyStringList.append(self._Token)
+
+ if not self._IsToken(TAB_COMMA_SPLIT):
+ break
+
+ else:
+ self._UndoToken()
+
+ if self._IsKeyword("FIXED", True):
+ FfsFileObj.Fixed = True
+
+ if self._IsKeyword("CHECKSUM", True):
+ FfsFileObj.CheckSum = True
+
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENTS:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ # For FFS, "Auto" is the default option and is equivalent to ""
+ if not self._Token == "Auto":
+ FfsFileObj.Alignment = self._Token
+
+ ## _GetAlignment() method
+ #
+ # Return the alignment value
+ #
+ # @param self The object pointer
+ # @retval True Successfully find alignment
+ # @retval False Not able to find alignment
+ #
+ def _GetAlignment(self):
+ if self._IsKeyword("Align", True):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("alignment value", self.FileName, self.CurrentLineNumber)
+ return True
+
+ return False
+
+ ## _GetSectionData() method
+ #
+ # Get section data for FILE statement
+ #
+ # @param self The object pointer
+ # @param FfsFileObj for whom section is got
+ #
+ def _GetSectionData(self, FfsFileObj):
+ self._GetDefineStatements(FfsFileObj)
+
+ while True:
+ IsLeafSection = self._GetLeafSection(FfsFileObj)
+ IsEncapSection = self._GetEncapsulationSec(FfsFileObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+ ## _GetLeafSection() method
+ #
+ # Get leaf section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def _GetLeafSection(self, Obj):
+ OldPos = self.GetFileBufferPos()
+
+ if not self._IsKeyword("SECTION"):
+ if len(Obj.SectionList) == 0:
+ raise Warning.Expected("SECTION", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENTS:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ AlignValue = self._Token
+
+ BuildNum = None
+ if self._IsKeyword("BUILD_NUM"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("Build number value", self.FileName, self.CurrentLineNumber)
+
+ BuildNum = self._Token
+
+ if self._IsKeyword("VERSION"):
+ if AlignValue == 'Auto':
+ raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("version", self.FileName, self.CurrentLineNumber)
+ VerSectionObj = VerSection()
+ VerSectionObj.Alignment = AlignValue
+ VerSectionObj.BuildNum = BuildNum
+ if self._GetStringData():
+ VerSectionObj.StringData = self._Token
+ else:
+ VerSectionObj.FileName = self._Token
+ Obj.SectionList.append(VerSectionObj)
+
+ elif self._IsKeyword(BINARY_FILE_TYPE_UI):
+ if AlignValue == 'Auto':
+ raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("UI", self.FileName, self.CurrentLineNumber)
+ UiSectionObj = UiSection()
+ UiSectionObj.Alignment = AlignValue
+ if self._GetStringData():
+ UiSectionObj.StringData = self._Token
+ else:
+ UiSectionObj.FileName = self._Token
+ Obj.SectionList.append(UiSectionObj)
+
+ elif self._IsKeyword("FV_IMAGE"):
+ if AlignValue == 'Auto':
+ raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("FV name or FV file path", self.FileName, self.CurrentLineNumber)
+
+ FvName = self._Token
+ FvObj = None
+
+ if self._IsToken("{"):
+ FvObj = FV()
+ FvObj.UiFvName = FvName.upper()
+ self._GetDefineStatements(FvObj)
+
+ self._GetBlockStatement(FvObj)
+ self._GetSetStatements(FvObj)
+ self._GetFvAlignment(FvObj)
+ self._GetFvAttributes(FvObj)
+
+ while True:
+ IsInf = self._GetInfStatement(FvObj)
+ IsFile = self._GetFileStatement(FvObj)
+ if not IsInf and not IsFile:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ FvImageSectionObj = FvImageSection()
+ FvImageSectionObj.Alignment = AlignValue
+ if FvObj is not None:
+ FvImageSectionObj.Fv = FvObj
+ FvImageSectionObj.FvName = None
+ else:
+ FvImageSectionObj.FvName = FvName.upper()
+ FvImageSectionObj.FvFileName = FvName
+
+ Obj.SectionList.append(FvImageSectionObj)
+
+ elif self._IsKeyword("PEI_DEPEX_EXP") or self._IsKeyword("DXE_DEPEX_EXP") or self._IsKeyword("SMM_DEPEX_EXP"):
+ if AlignValue == 'Auto':
+ raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
+ DepexSectionObj = DepexSection()
+ DepexSectionObj.Alignment = AlignValue
+ DepexSectionObj.DepexType = self._Token
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+ if not self._SkipToToken(T_CHAR_BRACE_R):
+ raise Warning.Expected("Depex expression ending '}'", self.FileName, self.CurrentLineNumber)
+
+ DepexSectionObj.Expression = self._SkippedChars.rstrip(T_CHAR_BRACE_R)
+ Obj.SectionList.append(DepexSectionObj)
+
+ else:
+ if not self._GetNextWord():
+ raise Warning.Expected("section type", self.FileName, self.CurrentLineNumber)
+
+ # An encapsulation section appears; restore the buffer position and return
+ if self._Token in {"COMPRESS", "GUIDED"}:
+ self.SetFileBufferPos(OldPos)
+ return False
+
+ if self._Token not in {"COMPAT16", BINARY_FILE_TYPE_PE32, BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE, "FV_IMAGE", "RAW", BINARY_FILE_TYPE_DXE_DEPEX,\
+ BINARY_FILE_TYPE_UI, "VERSION", BINARY_FILE_TYPE_PEI_DEPEX, "SUBTYPE_GUID", BINARY_FILE_TYPE_SMM_DEPEX}:
+ raise Warning("Unknown section type '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ if AlignValue == 'Auto' and self._Token not in {BINARY_FILE_TYPE_PE32, BINARY_FILE_TYPE_TE}:
+ raise Warning("Auto alignment can only be used in PE32 or TE section ", self.FileName, self.CurrentLineNumber)
+
+ # DataSection
+ DataSectionObj = DataSection()
+ DataSectionObj.Alignment = AlignValue
+ DataSectionObj.SecType = self._Token
+
+ if self._IsKeyword('RELOCS_STRIPPED') or self._IsKeyword('RELOCS_RETAINED'):
+ if self._FileCouldHaveRelocFlag(Obj.FvFileType) and self._SectionCouldHaveRelocFlag(DataSectionObj.SecType):
+ if self._Token == 'RELOCS_STRIPPED':
+ DataSectionObj.KeepReloc = False
+ else:
+ DataSectionObj.KeepReloc = True
+ else:
+ raise Warning("File type %s, section type %s, could not have reloc strip flag at line %d" % (Obj.FvFileType, DataSectionObj.SecType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if self._IsToken(TAB_EQUAL_SPLIT):
+ if not self._GetNextToken():
+ raise Warning.Expected("section file path", self.FileName, self.CurrentLineNumber)
+ DataSectionObj.SectFileName = self._Token
+ self._VerifyFile(DataSectionObj.SectFileName)
+ else:
+ if not self._GetCglSection(DataSectionObj):
+ return False
+
+ Obj.SectionList.append(DataSectionObj)
+
+ return True
+
+ ## _VerifyFile
+ #
+ # Check if file exists or not:
+ # If the current phase is GenFds, the file must exist;
+ # If the current phase is AutoGen and the file is not in $(OUTPUT_DIRECTORY), the file must exist
+ # @param FileName: File path to be verified.
+ #
+ def _VerifyFile(self, FileName):
+ if FileName.replace(TAB_WORKSPACE, '').find('$') != -1:
+ return
+ if not GlobalData.gAutoGenPhase or not self._GetMacroValue(TAB_DSC_DEFINES_OUTPUT_DIRECTORY) in FileName:
+ ErrorCode, ErrorInfo = PathClass(NormPath(FileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
+ if ErrorCode != 0:
+ EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
+
+ ## _GetCglSection() method
+ #
+ # Get compressed or GUIDed section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param AlignValue alignment value for complex section
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def _GetCglSection(self, Obj, AlignValue = None):
+
+ if self._IsKeyword("COMPRESS"):
+ type = "PI_STD"
+ if self._IsKeyword("PI_STD") or self._IsKeyword("PI_NONE"):
+ type = self._Token
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ CompressSectionObj = CompressSection()
+ CompressSectionObj.Alignment = AlignValue
+ CompressSectionObj.CompType = type
+ # Recursive sections...
+ while True:
+ IsLeafSection = self._GetLeafSection(CompressSectionObj)
+ IsEncapSection = self._GetEncapsulationSec(CompressSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(CompressSectionObj)
+ return True
+
+ elif self._IsKeyword("GUIDED"):
+ GuidValue = None
+ if self._GetNextGuid():
+ if self._Token in GlobalData.gGuidDict:
+ self._Token = GuidStructureStringToGuidString(GlobalData.gGuidDict[self._Token]).upper()
+ GuidValue = self._Token
+
+ AttribDict = self._GetGuidAttrib()
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+ GuidSectionObj = GuidSection()
+ GuidSectionObj.Alignment = AlignValue
+ GuidSectionObj.NameGuid = GuidValue
+ GuidSectionObj.SectionType = "GUIDED"
+ GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
+ GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]
+ GuidSectionObj.ExtraHeaderSize = AttribDict["EXTRA_HEADER_SIZE"]
+ # Recursive sections...
+ while True:
+ IsLeafSection = self._GetLeafSection(GuidSectionObj)
+ IsEncapSection = self._GetEncapsulationSec(GuidSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(GuidSectionObj)
+
+ return True
+
+ return False
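+
+ # Illustrative note (not part of the original source): hypothetical
+ # encapsulation sections parsed above (GUID and file names invented):
+ #   SECTION COMPRESS PI_STD { SECTION PE32 = SomeModule.efi }
+ #   SECTION GUIDED 11111111-2222-3333-4444-555555555555 PROCESSING_REQUIRED = TRUE {
+ #     SECTION RAW = Opaque.bin
+ #   }
+ # Both forms recurse through _GetLeafSection/_GetEncapsulationSec for the
+ # nested content.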
+
+ ## _GetGuidAttrib() method
+ #
+ # Get attributes for GUID section
+ #
+ # @param self The object pointer
+ # @retval AttribDict Dictionary of key-value pair of section attributes
+ #
+ def _GetGuidAttrib(self):
+ AttribDict = {}
+ AttribDict["PROCESSING_REQUIRED"] = "NONE"
+ AttribDict["AUTH_STATUS_VALID"] = "NONE"
+ AttribDict["EXTRA_HEADER_SIZE"] = -1
+ while self._IsKeyword("PROCESSING_REQUIRED") or self._IsKeyword("AUTH_STATUS_VALID") \
+ or self._IsKeyword("EXTRA_HEADER_SIZE"):
+ AttribKey = self._Token
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("TRUE(1)/FALSE(0)/Number", self.FileName, self.CurrentLineNumber)
+ elif AttribKey == "EXTRA_HEADER_SIZE":
+ Base = 10
+ if self._Token[0:2].upper() == "0X":
+ Base = 16
+ try:
+ AttribDict[AttribKey] = int(self._Token, Base)
+ continue
+ except ValueError:
+ raise Warning.Expected("Number", self.FileName, self.CurrentLineNumber)
+ elif self._Token.upper() not in {"TRUE", "FALSE", "1", "0"}:
+ raise Warning.Expected("TRUE/FALSE (1/0)", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self._Token
+
+ return AttribDict
+
+ ## _GetEncapsulationSec() method
+ #
+ # Get encapsulation section for FILE
+ #
+ # @param self The object pointer
+ # @param FfsFile for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def _GetEncapsulationSec(self, FfsFileObj):
+ OldPos = self.GetFileBufferPos()
+ if not self._IsKeyword("SECTION"):
+ if len(FfsFileObj.SectionList) == 0:
+ raise Warning.Expected("SECTION", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENT_NOAUTO:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ AlignValue = self._Token
+
+ if not self._GetCglSection(FfsFileObj, AlignValue):
+ self.SetFileBufferPos(OldPos)
+ return False
+ else:
+ return True
+
+ def _GetFmp(self):
+ if not self._GetNextToken():
+ return False
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[FMPPAYLOAD."):
+ self.SectionParser(S)
+ self._UndoToken()
+ return False
+
+ self._UndoToken()
+ self._SkipToToken("[FMPPAYLOAD.", True)
+ FmpUiName = self._GetUiName().upper()
+ if FmpUiName in self.Profile.FmpPayloadDict:
+ raise Warning("Duplicated FMP UI name found: %s" % FmpUiName, self.FileName, self.CurrentLineNumber)
+
+ FmpData = CapsulePayload()
+ FmpData.UiName = FmpUiName
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning("The FMP payload section is empty!", self.FileName, self.CurrentLineNumber)
+ FmpKeyList = ['IMAGE_HEADER_INIT_VERSION', 'IMAGE_TYPE_ID', 'IMAGE_INDEX', 'HARDWARE_INSTANCE', 'CERTIFICATE_GUID', 'MONOTONIC_COUNT']
+ while self._Token in FmpKeyList:
+ Name = self._Token
+ FmpKeyList.remove(Name)
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if Name == 'IMAGE_TYPE_ID':
+ if not self._GetNextGuid():
+ raise Warning.Expected("GUID value for IMAGE_TYPE_ID.", self.FileName, self.CurrentLineNumber)
+ FmpData.ImageTypeId = self._Token
+ elif Name == 'CERTIFICATE_GUID':
+ if not self._GetNextGuid():
+ raise Warning.Expected("GUID value for CERTIFICATE_GUID.", self.FileName, self.CurrentLineNumber)
+ FmpData.Certificate_Guid = self._Token
+ if UUID(FmpData.Certificate_Guid) != EFI_CERT_TYPE_RSA2048_SHA256_GUID and UUID(FmpData.Certificate_Guid) != EFI_CERT_TYPE_PKCS7_GUID:
+ raise Warning("Only support EFI_CERT_TYPE_RSA2048_SHA256_GUID or EFI_CERT_TYPE_PKCS7_GUID for CERTIFICATE_GUID.", self.FileName, self.CurrentLineNumber)
+ else:
+ if not self._GetNextToken():
+ raise Warning.Expected("value of %s" % Name, self.FileName, self.CurrentLineNumber)
+ Value = self._Token
+ if Name == 'IMAGE_HEADER_INIT_VERSION':
+ if FdfParser._Verify(Name, Value, 'UINT8'):
+ FmpData.Version = Value
+ elif Name == 'IMAGE_INDEX':
+ if FdfParser._Verify(Name, Value, 'UINT8'):
+ FmpData.ImageIndex = Value
+ elif Name == 'HARDWARE_INSTANCE':
+ if FdfParser._Verify(Name, Value, 'UINT8'):
+ FmpData.HardwareInstance = Value
+ elif Name == 'MONOTONIC_COUNT':
+ if FdfParser._Verify(Name, Value, 'UINT64'):
+ FmpData.MonotonicCount = Value
+ if FmpData.MonotonicCount.upper().startswith('0X'):
+ FmpData.MonotonicCount = int(FmpData.MonotonicCount, 16)
+ else:
+ FmpData.MonotonicCount = int(FmpData.MonotonicCount)
+ if not self._GetNextToken():
+ break
+ else:
+ self._UndoToken()
+
+ if (FmpData.MonotonicCount and not FmpData.Certificate_Guid) or (not FmpData.MonotonicCount and FmpData.Certificate_Guid):
+ EdkLogger.error("FdfParser", FORMAT_INVALID, "CERTIFICATE_GUID and MONOTONIC_COUNT must be used as a pair.")
+
+ # IMAGE_TYPE_ID is the only required item
+ if FmpKeyList and 'IMAGE_TYPE_ID' in FmpKeyList:
+ raise Warning("'IMAGE_TYPE_ID' is missing in the FMP payload section.", self.FileName, self.CurrentLineNumber)
+ # get the Image file and Vendor code file
+ self._GetFMPCapsuleData(FmpData)
+ if not FmpData.ImageFile:
+ raise Warning("Missing image file in FMP payload section.", self.FileName, self.CurrentLineNumber)
+ # check that there is at most one Vendor code file
+ if len(FmpData.VendorCodeFile) > 1:
+ raise Warning("At most one Vendor code file is allowed per FMP payload section.", self.FileName, self.CurrentLineNumber)
+ self.Profile.FmpPayloadDict[FmpUiName] = FmpData
+ return True
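+
+ # Illustrative note (not part of the original source): a hypothetical FMP
+ # payload section parsed above (name and GUID invented):
+ #   [FmpPayload.FmpPayloadExample]
+ #   IMAGE_HEADER_INIT_VERSION = 0x02
+ #   IMAGE_TYPE_ID             = 11111111-2222-3333-4444-555555555555
+ #   IMAGE_INDEX               = 0x1
+ #   FV = FVMAIN
+ # IMAGE_TYPE_ID is mandatory; CERTIFICATE_GUID and MONOTONIC_COUNT must
+ # appear together or not at all.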
+
+ ## _GetCapsule() method
+ #
+ # Get capsule section contents and store its data into capsule list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a capsule
+ # @retval False Not able to find a capsule
+ #
+ def _GetCapsule(self):
+ if not self._GetNextToken():
+ return False
+
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[CAPSULE."):
+ self.SectionParser(S)
+ self._UndoToken()
+ return False
+
+ self._UndoToken()
+ if not self._IsToken("[CAPSULE.", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine:], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning.Expected("[Capsule.]", self.FileName, self.CurrentLineNumber)
+
+ CapsuleObj = Capsule()
+
+ CapsuleName = self._GetUiName()
+ if not CapsuleName:
+ raise Warning.Expected("capsule name", self.FileName, self.CurrentLineNumber)
+
+ CapsuleObj.UiCapsuleName = CapsuleName.upper()
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ if self._IsKeyword("CREATE_FILE"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("file name", self.FileName, self.CurrentLineNumber)
+
+ CapsuleObj.CreateFile = self._Token
+
+ self._GetCapsuleStatements(CapsuleObj)
+ self.Profile.CapsuleDict[CapsuleObj.UiCapsuleName] = CapsuleObj
+ return True
+
+ ## _GetCapsuleStatements() method
+ #
+ # Get statements for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom statements are got
+ #
+ def _GetCapsuleStatements(self, Obj):
+ self._GetCapsuleTokens(Obj)
+ self._GetDefineStatements(Obj)
+ self._GetSetStatements(Obj)
+ self._GetCapsuleData(Obj)
+
+ ## _GetCapsuleTokens() method
+ #
+ # Get token statements for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom token statements are got
+ #
+ def _GetCapsuleTokens(self, Obj):
+ if not self._GetNextToken():
+ return False
+ while self._Token in {"CAPSULE_GUID", "CAPSULE_HEADER_SIZE", "CAPSULE_FLAGS", "OEM_CAPSULE_FLAGS", "CAPSULE_HEADER_INIT_VERSION"}:
+ Name = self._Token.strip()
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("value", self.FileName, self.CurrentLineNumber)
+ if Name == 'CAPSULE_FLAGS':
+ if self._Token not in {"PersistAcrossReset", "PopulateSystemTable", "InitiateReset"}:
+ raise Warning.Expected("PersistAcrossReset, PopulateSystemTable, or InitiateReset", self.FileName, self.CurrentLineNumber)
+ Value = self._Token.strip()
+ while self._IsToken(TAB_COMMA_SPLIT):
+ Value += TAB_COMMA_SPLIT
+ if not self._GetNextToken():
+ raise Warning.Expected("value", self.FileName, self.CurrentLineNumber)
+ if self._Token not in {"PersistAcrossReset", "PopulateSystemTable", "InitiateReset"}:
+ raise Warning.Expected("PersistAcrossReset, PopulateSystemTable, or InitiateReset", self.FileName, self.CurrentLineNumber)
+ Value += self._Token.strip()
+ elif Name == 'OEM_CAPSULE_FLAGS':
+ Value = self._Token.strip()
+ if not Value.upper().startswith('0X'):
+ raise Warning.Expected("hex value starting with 0x", self.FileName, self.CurrentLineNumber)
+ try:
+ Value = int(Value, 0)
+ except ValueError:
+ raise Warning.Expected("hex string failed to convert to value", self.FileName, self.CurrentLineNumber)
+ if not 0x0000 <= Value <= 0xFFFF:
+ raise Warning.Expected("hex value between 0x0000 and 0xFFFF", self.FileName, self.CurrentLineNumber)
+ # keep the original hex string form once the range check passes
+ Value = self._Token.strip()
+ else:
+ Value = self._Token.strip()
+ Obj.TokensDict[Name] = Value
+ if not self._GetNextToken():
+ return False
+ self._UndoToken()
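+
+ # Illustrative note (not part of the original source): hypothetical capsule
+ # tokens parsed above (GUID invented):
+ #   CAPSULE_GUID  = 11111111-2222-3333-4444-555555555555
+ #   CAPSULE_FLAGS = PersistAcrossReset,InitiateReset
+ # Flag values are restricted to the three names checked above, and
+ # OEM_CAPSULE_FLAGS must be a hex value in [0x0000, 0xFFFF].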
+
+ ## _GetCapsuleData() method
+ #
+ # Get capsule data for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom capsule data are got
+ #
+ def _GetCapsuleData(self, Obj):
+ while True:
+ IsInf = self._GetInfStatement(Obj, True)
+ IsFile = self._GetFileStatement(Obj, True)
+ IsFv = self._GetFvStatement(Obj)
+ IsFd = self._GetFdStatement(Obj)
+ IsAnyFile = self._GetAnyFileStatement(Obj)
+ IsAfile = self._GetAfileStatement(Obj)
+ IsFmp = self._GetFmpStatement(Obj)
+ if not (IsInf or IsFile or IsFv or IsFd or IsAnyFile or IsAfile or IsFmp):
+ break
+
+ ## _GetFMPCapsuleData() method
+ #
+ # Get capsule data for FMP capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom capsule data are got
+ #
+ def _GetFMPCapsuleData(self, Obj):
+ while True:
+ IsFv = self._GetFvStatement(Obj, True)
+ IsFd = self._GetFdStatement(Obj, True)
+ IsAnyFile = self._GetAnyFileStatement(Obj, True)
+ if not (IsFv or IsFd or IsAnyFile):
+ break
+
+ ## _GetFvStatement() method
+ #
+ # Get FV for capsule
+ #
+ # @param self The object pointer
+ # @param CapsuleObj for whom FV is got
+ # @retval True Successfully find a FV statement
+ # @retval False Not able to find a FV statement
+ #
+ def _GetFvStatement(self, CapsuleObj, FMPCapsule = False):
+ if not self._IsKeyword(BINARY_FILE_TYPE_FV):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FV name", self.FileName, self.CurrentLineNumber)
+
+ if self._Token.upper() not in self.Profile.FvDict:
+ raise Warning("FV name does not exist", self.FileName, self.CurrentLineNumber)
+
+ myCapsuleFv = CapsuleFv()
+ myCapsuleFv.FvName = self._Token
+ if FMPCapsule:
+ if not CapsuleObj.ImageFile:
+ CapsuleObj.ImageFile.append(myCapsuleFv)
+ else:
+ CapsuleObj.VendorCodeFile.append(myCapsuleFv)
+ else:
+ CapsuleObj.CapsuleDataList.append(myCapsuleFv)
+ return True
+
+ ## _GetFdStatement() method
+ #
+ # Get FD for capsule
+ #
+ # @param self The object pointer
+ # @param CapsuleObj for whom FD is got
+ # @retval True Successfully find a FD statement
+ # @retval False Not able to find a FD statement
+ #
+ def _GetFdStatement(self, CapsuleObj, FMPCapsule = False):
+ if not self._IsKeyword("FD"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("FD name", self.FileName, self.CurrentLineNumber)
+
+ if self._Token.upper() not in self.Profile.FdDict:
+ raise Warning("FD name does not exist", self.FileName, self.CurrentLineNumber)
+
+ myCapsuleFd = CapsuleFd()
+ myCapsuleFd.FdName = self._Token
+ if FMPCapsule:
+ if not CapsuleObj.ImageFile:
+ CapsuleObj.ImageFile.append(myCapsuleFd)
+ else:
+ CapsuleObj.VendorCodeFile.append(myCapsuleFd)
+ else:
+ CapsuleObj.CapsuleDataList.append(myCapsuleFd)
+ return True
+
+ def _GetFmpStatement(self, CapsuleObj):
+ if not self._IsKeyword("FMP_PAYLOAD"):
+ if not self._IsKeyword("FMP"):
+ return False
+
+ if not self._IsKeyword("PAYLOAD"):
+ self._UndoToken()
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("payload name after FMP_PAYLOAD =", self.FileName, self.CurrentLineNumber)
+ Payload = self._Token.upper()
+ if Payload not in self.Profile.FmpPayloadDict:
+ raise Warning("This FMP Payload does not exist: %s" % self._Token, self.FileName, self.CurrentLineNumber)
+ CapsuleObj.FmpPayloadList.append(self.Profile.FmpPayloadDict[Payload])
+ return True
+
+ def _ParseRawFileStatement(self):
+ if not self._IsKeyword("FILE"):
+ return None
+
+ if not self._IsKeyword("DATA"):
+ self._UndoToken()
+ return None
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("File name", self.FileName, self.CurrentLineNumber)
+
+ AnyFileName = self._Token
+ self._VerifyFile(AnyFileName)
+
+ if not os.path.isabs(AnyFileName):
+ AnyFileName = mws.join(GenFdsGlobalVariable.WorkSpaceDir, AnyFileName)
+
+ return AnyFileName
+
+ ## _GetAnyFileStatement() method
+ #
+ # Get AnyFile for capsule
+ #
+ # @param self The object pointer
+ # @param CapsuleObj for whom AnyFile is got
+ # @retval True Successfully find an AnyFile statement
+ # @retval False Not able to find an AnyFile statement
+ #
+ def _GetAnyFileStatement(self, CapsuleObj, FMPCapsule = False):
+ AnyFileName = self._ParseRawFileStatement()
+ if not AnyFileName:
+ return False
+
+ myCapsuleAnyFile = CapsuleAnyFile()
+ myCapsuleAnyFile.FileName = AnyFileName
+ if FMPCapsule:
+ if not CapsuleObj.ImageFile:
+ CapsuleObj.ImageFile.append(myCapsuleAnyFile)
+ else:
+ CapsuleObj.VendorCodeFile.append(myCapsuleAnyFile)
+ else:
+ CapsuleObj.CapsuleDataList.append(myCapsuleAnyFile)
+ return True
+
+ ## _GetAfileStatement() method
+ #
+ # Get Afile for capsule
+ #
+ # @param self The object pointer
+ # @param CapsuleObj for whom Afile is got
+ # @retval True Successfully find an Afile statement
+ # @retval False Not able to find an Afile statement
+ #
+ def _GetAfileStatement(self, CapsuleObj):
+ if not self._IsKeyword("APPEND"):
+ return False
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("Afile name", self.FileName, self.CurrentLineNumber)
+
+ AfileName = self._Token
+ AfileBaseName = os.path.basename(AfileName)
+
+ if os.path.splitext(AfileBaseName)[1] not in {".bin", ".BIN", ".Bin", ".dat", ".DAT", ".Dat", ".data", ".DATA", ".Data"}:
+            raise Warning('invalid binary file type, should be one of "bin", "BIN", "Bin", "dat", "DAT", "Dat", "data", "DATA", "Data"', \
+                          self.FileName, self.CurrentLineNumber)
+
+ if not os.path.isabs(AfileName):
+ AfileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(AfileName)
+ self._VerifyFile(AfileName)
+ else:
+ if not os.path.exists(AfileName):
+ raise Warning('%s does not exist' % AfileName, self.FileName, self.CurrentLineNumber)
+ else:
+ pass
+
+ myCapsuleAfile = CapsuleAfile()
+ myCapsuleAfile.FileName = AfileName
+ CapsuleObj.CapsuleDataList.append(myCapsuleAfile)
+ return True
+
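+    # Illustrative FDF fragment accepted by _GetAfileStatement() above
+    # (hypothetical file name; the extension must be one of the .bin/.dat/.data
+    # variants checked above):
+    #
+    #   APPEND = Payload/Extra.bin
+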
+ ## _GetRule() method
+ #
+ # Get Rule section contents and store its data into rule list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a Rule
+ # @retval False Not able to find a Rule
+ #
+ def _GetRule(self):
+ if not self._GetNextToken():
+ return False
+
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[RULE."):
+ self.SectionParser(S)
+ self._UndoToken()
+ return False
+ self._UndoToken()
+ if not self._IsToken("[Rule.", True):
+ raise Warning.Expected("[Rule.]", self.FileName, self.CurrentLineNumber)
+
+ if not self._SkipToToken(TAB_SPLIT):
+ raise Warning.Expected("'.'", self.FileName, self.CurrentLineNumber)
+
+ Arch = self._SkippedChars.rstrip(TAB_SPLIT)
+
+ ModuleType = self._GetModuleType()
+
+ TemplateName = ""
+ if self._IsToken(TAB_SPLIT):
+ if not self._GetNextWord():
+ raise Warning.Expected("template name", self.FileName, self.CurrentLineNumber)
+ TemplateName = self._Token
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ RuleObj = self._GetRuleFileStatements()
+ RuleObj.Arch = Arch.upper()
+ RuleObj.ModuleType = ModuleType
+ RuleObj.TemplateName = TemplateName
+ if TemplateName == '':
+ self.Profile.RuleDict['RULE' + \
+ TAB_SPLIT + \
+ Arch.upper() + \
+ TAB_SPLIT + \
+ ModuleType.upper() ] = RuleObj
+ else:
+ self.Profile.RuleDict['RULE' + \
+ TAB_SPLIT + \
+ Arch.upper() + \
+ TAB_SPLIT + \
+ ModuleType.upper() + \
+ TAB_SPLIT + \
+ TemplateName.upper() ] = RuleObj
+ return True
+
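+    # Illustrative [Rule] headers accepted by _GetRule() above, with the
+    # RuleDict keys they produce (hypothetical template name):
+    #
+    #   [Rule.Common.DXE_DRIVER]                -> 'RULE.COMMON.DXE_DRIVER'
+    #   [Rule.Common.DXE_DRIVER.SampleTemplate] -> 'RULE.COMMON.DXE_DRIVER.SAMPLETEMPLATE'
+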
+ ## _GetModuleType() method
+ #
+ # Return the module type
+ #
+ # @param self The object pointer
+ # @retval string module type
+ #
+ def _GetModuleType(self):
+ if not self._GetNextWord():
+ raise Warning.Expected("Module type", self.FileName, self.CurrentLineNumber)
+ if self._Token.upper() not in {
+ SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM,
+ SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER,
+ SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_UEFI_DRIVER,
+ SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION,
+ TAB_DEFAULT, SUP_MODULE_BASE,
+ EDK_COMPONENT_TYPE_SECURITY_CORE,
+ EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER,
+ EDK_COMPONENT_TYPE_PIC_PEIM,
+ EDK_COMPONENT_TYPE_RELOCATABLE_PEIM, "PE32_PEIM",
+ EDK_COMPONENT_TYPE_BS_DRIVER, EDK_COMPONENT_TYPE_RT_DRIVER,
+ EDK_COMPONENT_TYPE_SAL_RT_DRIVER,
+ EDK_COMPONENT_TYPE_APPLICATION, "ACPITABLE",
+ SUP_MODULE_SMM_CORE, SUP_MODULE_MM_STANDALONE,
+ SUP_MODULE_MM_CORE_STANDALONE}:
+ raise Warning("Unknown Module type '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ return self._Token
+
+ ## _GetFileExtension() method
+ #
+ # Return the file extension
+ #
+ # @param self The object pointer
+ # @retval string file name extension
+ #
+ def _GetFileExtension(self):
+ if not self._IsToken(TAB_SPLIT):
+ raise Warning.Expected("'.'", self.FileName, self.CurrentLineNumber)
+
+ Ext = ""
+ if self._GetNextToken():
+ if FileExtensionPattern.match(self._Token):
+ Ext = self._Token
+ return TAB_SPLIT + Ext
+ else:
+ raise Warning("Unknown file extension '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+
+ else:
+ raise Warning.Expected("file extension", self.FileName, self.CurrentLineNumber)
+
+    ## _GetRuleFileStatements() method
+ #
+ # Get rule contents
+ #
+ # @param self The object pointer
+ # @retval Rule Rule object
+ #
+ def _GetRuleFileStatements(self):
+ if not self._IsKeyword("FILE"):
+ raise Warning.Expected("FILE", self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextWord():
+ raise Warning.Expected("FFS type", self.FileName, self.CurrentLineNumber)
+
+ Type = self._Token.strip().upper()
+ if Type not in {"RAW", "FREEFORM", SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM,
+ "PEI_DXE_COMBO", "DRIVER", SUP_MODULE_DXE_CORE, EDK_COMPONENT_TYPE_APPLICATION,
+ "FV_IMAGE", "SMM", SUP_MODULE_SMM_CORE, SUP_MODULE_MM_STANDALONE,
+ SUP_MODULE_MM_CORE_STANDALONE}:
+ raise Warning("Unknown FV type '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._IsKeyword("$(NAMED_GUID)"):
+ if not self._GetNextWord():
+ NamedGuid = self._CurrentLine()[self.CurrentOffsetWithinLine:].split()[0].strip()
+ if GlobalData.gGuidPatternEnd.match(NamedGuid):
+ self.CurrentOffsetWithinLine += len(NamedGuid)
+ self._Token = NamedGuid
+ else:
+ raise Warning.Expected("$(NAMED_GUID)", self.FileName, self.CurrentLineNumber)
+ if self._Token == 'PCD':
+ if not self._IsToken("("):
+ raise Warning.Expected("'('", self.FileName, self.CurrentLineNumber)
+ PcdPair = self._GetNextPcdSettings()
+ if not self._IsToken(")"):
+ raise Warning.Expected("')'", self.FileName, self.CurrentLineNumber)
+ self._Token = 'PCD('+PcdPair[1]+TAB_SPLIT+PcdPair[0]+')'
+
+ NameGuid = self._Token
+
+ KeepReloc = None
+ if self._IsKeyword('RELOCS_STRIPPED') or self._IsKeyword('RELOCS_RETAINED'):
+ if self._FileCouldHaveRelocFlag(Type):
+ if self._Token == 'RELOCS_STRIPPED':
+ KeepReloc = False
+ else:
+ KeepReloc = True
+ else:
+ raise Warning("File type %s could not have reloc strip flag%d" % (Type, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ KeyStringList = []
+ if self._GetNextToken():
+ if TokenFindPattern.match(self._Token):
+ KeyStringList.append(self._Token)
+ if self._IsToken(TAB_COMMA_SPLIT):
+ while self._GetNextToken():
+ if not TokenFindPattern.match(self._Token):
+ raise Warning.Expected("KeyString \"Target_Tag_Arch\"", self.FileName, self.CurrentLineNumber)
+ KeyStringList.append(self._Token)
+
+ if not self._IsToken(TAB_COMMA_SPLIT):
+ break
+
+ else:
+ self._UndoToken()
+
+
+ Fixed = False
+ if self._IsKeyword("Fixed", True):
+ Fixed = True
+
+ CheckSum = False
+ if self._IsKeyword("CheckSum", True):
+ CheckSum = True
+
+ AlignValue = ""
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENTS:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+            # For FFS, "Auto" is the default option and is treated the same as ""
+ if not self._Token == "Auto":
+ AlignValue = self._Token
+
+ if self._IsToken("{"):
+ # Complex file rule expected
+ NewRule = RuleComplexFile()
+ NewRule.FvFileType = Type
+ NewRule.NameGuid = NameGuid
+ NewRule.Alignment = AlignValue
+ NewRule.CheckSum = CheckSum
+ NewRule.Fixed = Fixed
+ NewRule.KeyStringList = KeyStringList
+ if KeepReloc is not None:
+ NewRule.KeepReloc = KeepReloc
+
+ while True:
+ IsEncapsulate = self._GetRuleEncapsulationSection(NewRule)
+ IsLeaf = self._GetEfiSection(NewRule)
+ if not IsEncapsulate and not IsLeaf:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+
+ return NewRule
+
+ else:
+ # Simple file rule expected
+ if not self._GetNextWord():
+ raise Warning.Expected("leaf section type", self.FileName, self.CurrentLineNumber)
+
+ SectionName = self._Token
+
+ if SectionName not in {
+ "COMPAT16", BINARY_FILE_TYPE_PE32,
+ BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE, "FV_IMAGE",
+ "RAW",BINARY_FILE_TYPE_DXE_DEPEX, BINARY_FILE_TYPE_UI,
+ BINARY_FILE_TYPE_PEI_DEPEX, "VERSION", "SUBTYPE_GUID",
+ BINARY_FILE_TYPE_SMM_DEPEX}:
+ raise Warning("Unknown leaf section name '%s'" % SectionName, self.FileName, self.CurrentLineNumber)
+
+
+ if self._IsKeyword("Fixed", True):
+ Fixed = True
+
+ if self._IsKeyword("CheckSum", True):
+ CheckSum = True
+
+ SectAlignment = ""
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENTS:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+                if self._Token == 'Auto' and SectionName not in {BINARY_FILE_TYPE_PE32, BINARY_FILE_TYPE_TE}:
+                    raise Warning("Auto alignment can only be used in PE32 or TE section", self.FileName, self.CurrentLineNumber)
+ SectAlignment = self._Token
+
+ Ext = None
+ if self._IsToken(TAB_VALUE_SPLIT):
+ Ext = self._GetFileExtension()
+ elif not self._GetNextToken():
+ raise Warning.Expected("File name", self.FileName, self.CurrentLineNumber)
+
+ NewRule = RuleSimpleFile()
+ NewRule.SectionType = SectionName
+ NewRule.FvFileType = Type
+ NewRule.NameGuid = NameGuid
+ NewRule.Alignment = AlignValue
+ NewRule.SectAlignment = SectAlignment
+ NewRule.CheckSum = CheckSum
+ NewRule.Fixed = Fixed
+ NewRule.KeyStringList = KeyStringList
+ if KeepReloc is not None:
+ NewRule.KeepReloc = KeepReloc
+ NewRule.FileExtension = Ext
+ NewRule.FileName = self._Token
+ return NewRule
+
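+    # Illustrative rule bodies accepted by _GetRuleFileStatements() above
+    # (hypothetical fragments modeled on common EDK2 FDF usage):
+    #
+    #   Simple rule:   FILE RAW = $(NAMED_GUID) RAW |.bin
+    #
+    #   Complex rule:  FILE DRIVER = $(NAMED_GUID) {
+    #                    PE32 PE32 $(INF_OUTPUT)/$(MODULE_NAME).efi
+    #                  }
+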
+ ## _GetEfiSection() method
+ #
+ # Get section list for Rule
+ #
+ # @param self The object pointer
+ # @param Obj for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def _GetEfiSection(self, Obj):
+ OldPos = self.GetFileBufferPos()
+ EfiSectionObj = EfiSection()
+ if not self._GetNextWord():
+ CurrentLine = self._CurrentLine()[self.CurrentOffsetWithinLine:].split()[0].strip()
+ if self._Token == '{' and Obj.FvFileType == "RAW" and TAB_SPLIT in CurrentLine:
+ if self._IsToken(TAB_VALUE_SPLIT):
+ EfiSectionObj.FileExtension = self._GetFileExtension()
+ elif self._GetNextToken():
+ EfiSectionObj.FileName = self._Token
+ EfiSectionObj.SectionType = BINARY_FILE_TYPE_RAW
+ Obj.SectionList.append(EfiSectionObj)
+ return True
+ else:
+ return False
+ SectionName = self._Token
+
+ if SectionName not in {
+ "COMPAT16", BINARY_FILE_TYPE_PE32,
+ BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE, "FV_IMAGE",
+ "RAW",BINARY_FILE_TYPE_DXE_DEPEX, BINARY_FILE_TYPE_UI,
+ BINARY_FILE_TYPE_PEI_DEPEX, "VERSION", "SUBTYPE_GUID",
+ BINARY_FILE_TYPE_SMM_DEPEX, BINARY_FILE_TYPE_GUID}:
+ self._UndoToken()
+ return False
+
+ if SectionName == "FV_IMAGE":
+ FvImageSectionObj = FvImageSection()
+ if self._IsKeyword("FV_IMAGE"):
+ pass
+ if self._IsToken("{"):
+ FvObj = FV()
+ self._GetDefineStatements(FvObj)
+ self._GetBlockStatement(FvObj)
+ self._GetSetStatements(FvObj)
+ self._GetFvAlignment(FvObj)
+ self._GetFvAttributes(FvObj)
+                self._GetAprioriSection(FvObj)
+                # called twice: an FV may declare up to two APRIORI sections (PEI and DXE)
+                self._GetAprioriSection(FvObj)
+
+ while True:
+ IsInf = self._GetInfStatement(FvObj)
+ IsFile = self._GetFileStatement(FvObj)
+ if not IsInf and not IsFile:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+ FvImageSectionObj.Fv = FvObj
+ FvImageSectionObj.FvName = None
+
+ else:
+ if not self._IsKeyword(BINARY_FILE_TYPE_FV):
+ raise Warning.Expected("'FV'", self.FileName, self.CurrentLineNumber)
+ FvImageSectionObj.FvFileType = self._Token
+
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENT_NOAUTO:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+ FvImageSectionObj.Alignment = self._Token
+
+ if self._IsToken(TAB_VALUE_SPLIT):
+ FvImageSectionObj.FvFileExtension = self._GetFileExtension()
+ elif self._GetNextToken():
+ if self._Token not in {
+ T_CHAR_BRACE_R, "COMPAT16", BINARY_FILE_TYPE_PE32,
+ BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE,
+ "FV_IMAGE", "RAW", BINARY_FILE_TYPE_DXE_DEPEX,
+ BINARY_FILE_TYPE_UI, "VERSION",
+ BINARY_FILE_TYPE_PEI_DEPEX, BINARY_FILE_TYPE_GUID,
+ BINARY_FILE_TYPE_SMM_DEPEX}:
+ FvImageSectionObj.FvFileName = self._Token
+ else:
+ self._UndoToken()
+ else:
+ raise Warning.Expected("FV file name", self.FileName, self.CurrentLineNumber)
+
+ Obj.SectionList.append(FvImageSectionObj)
+ return True
+
+ EfiSectionObj.SectionType = SectionName
+
+ if not self._GetNextToken():
+ raise Warning.Expected("file type", self.FileName, self.CurrentLineNumber)
+
+ if self._Token == "STRING":
+ if not self._RuleSectionCouldHaveString(EfiSectionObj.SectionType):
+ raise Warning("%s section could NOT have string data%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("Quoted String", self.FileName, self.CurrentLineNumber)
+
+ if self._GetStringData():
+ EfiSectionObj.StringData = self._Token
+
+ if self._IsKeyword("BUILD_NUM"):
+ if not self._RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
+ raise Warning("%s section could NOT have BUILD_NUM%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("Build number", self.FileName, self.CurrentLineNumber)
+ EfiSectionObj.BuildNum = self._Token
+
+ else:
+ EfiSectionObj.FileType = self._Token
+ self._CheckRuleSectionFileType(EfiSectionObj.SectionType, EfiSectionObj.FileType)
+
+ if self._IsKeyword("Optional"):
+ if not self._RuleSectionCouldBeOptional(EfiSectionObj.SectionType):
+ raise Warning("%s section could NOT be optional%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+ EfiSectionObj.Optional = True
+
+ if self._IsKeyword("BUILD_NUM"):
+ if not self._RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
+ raise Warning("%s section could NOT have BUILD_NUM%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("Build number", self.FileName, self.CurrentLineNumber)
+ EfiSectionObj.BuildNum = self._Token
+
+ if self._GetAlignment():
+ if self._Token not in ALIGNMENTS:
+ raise Warning("Incorrect alignment '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+            if self._Token == 'Auto' and SectionName not in {BINARY_FILE_TYPE_PE32, BINARY_FILE_TYPE_TE}:
+                raise Warning("Auto alignment can only be used in PE32 or TE section", self.FileName, self.CurrentLineNumber)
+ EfiSectionObj.Alignment = self._Token
+
+ if self._IsKeyword('RELOCS_STRIPPED') or self._IsKeyword('RELOCS_RETAINED'):
+ if self._SectionCouldHaveRelocFlag(EfiSectionObj.SectionType):
+ if self._Token == 'RELOCS_STRIPPED':
+ EfiSectionObj.KeepReloc = False
+ else:
+ EfiSectionObj.KeepReloc = True
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
+ raise Warning("Section type %s has reloc strip flag conflict with Rule" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
+ else:
+ raise Warning("Section type %s could not have reloc strip flag" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
+
+
+ if self._IsToken(TAB_VALUE_SPLIT):
+ EfiSectionObj.FileExtension = self._GetFileExtension()
+ elif self._GetNextToken():
+ if self._Token not in {
+ T_CHAR_BRACE_R, "COMPAT16", BINARY_FILE_TYPE_PE32,
+ BINARY_FILE_TYPE_PIC, BINARY_FILE_TYPE_TE,
+ "FV_IMAGE", "RAW", BINARY_FILE_TYPE_DXE_DEPEX,
+ BINARY_FILE_TYPE_UI, "VERSION",
+ BINARY_FILE_TYPE_PEI_DEPEX, BINARY_FILE_TYPE_GUID,
+ BINARY_FILE_TYPE_SMM_DEPEX}:
+
+ if self._Token.startswith('PCD'):
+ self._UndoToken()
+ self._GetNextWord()
+
+ if self._Token == 'PCD':
+ if not self._IsToken("("):
+ raise Warning.Expected("'('", self.FileName, self.CurrentLineNumber)
+ PcdPair = self._GetNextPcdSettings()
+ if not self._IsToken(")"):
+ raise Warning.Expected("')'", self.FileName, self.CurrentLineNumber)
+ self._Token = 'PCD('+PcdPair[1]+TAB_SPLIT+PcdPair[0]+')'
+
+ EfiSectionObj.FileName = self._Token
+
+ else:
+ self._UndoToken()
+ else:
+ raise Warning.Expected("section file name", self.FileName, self.CurrentLineNumber)
+
+ Obj.SectionList.append(EfiSectionObj)
+ return True
+
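+    # Illustrative leaf sections accepted by _GetEfiSection() above
+    # (fragments modeled on common EDK2 rule lines):
+    #
+    #   UI      STRING="$(MODULE_NAME)" Optional
+    #   VERSION STRING="$(INF_VERSION)" Optional BUILD_NUM=$(BUILD_NUMBER)
+    #   PE32    PE32 $(INF_OUTPUT)/$(MODULE_NAME).efi
+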
+ ## _RuleSectionCouldBeOptional() method
+ #
+ # Get whether a section could be optional
+ #
+ # @param SectionType The section type to check
+ # @retval True section could be optional
+ # @retval False section never optional
+ #
+ @staticmethod
+ def _RuleSectionCouldBeOptional(SectionType):
+        return SectionType in {BINARY_FILE_TYPE_DXE_DEPEX, BINARY_FILE_TYPE_UI, "VERSION", BINARY_FILE_TYPE_PEI_DEPEX, "RAW", BINARY_FILE_TYPE_SMM_DEPEX}
+
+ ## _RuleSectionCouldHaveBuildNum() method
+ #
+ # Get whether a section could have build number information
+ #
+ # @param SectionType The section type to check
+ # @retval True section could have build number information
+ # @retval False section never have build number information
+ #
+ @staticmethod
+ def _RuleSectionCouldHaveBuildNum(SectionType):
+        return SectionType == "VERSION"
+
+ ## _RuleSectionCouldHaveString() method
+ #
+ # Get whether a section could have string
+ #
+ # @param SectionType The section type to check
+ # @retval True section could have string
+ # @retval False section never have string
+ #
+ @staticmethod
+ def _RuleSectionCouldHaveString(SectionType):
+        return SectionType in {BINARY_FILE_TYPE_UI, "VERSION"}
+
+ ## _CheckRuleSectionFileType() method
+ #
+ # Get whether a section matches a file type
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @param FileType The file type to check
+ #
+ def _CheckRuleSectionFileType(self, SectionType, FileType):
+ WarningString = "Incorrect section file type '%s'"
+ if SectionType == "COMPAT16":
+ if FileType not in {"COMPAT16", "SEC_COMPAT16"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_PE32:
+ if FileType not in {BINARY_FILE_TYPE_PE32, "SEC_PE32"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_PIC:
+            if FileType != BINARY_FILE_TYPE_PIC:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_TE:
+ if FileType not in {BINARY_FILE_TYPE_TE, "SEC_TE"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "RAW":
+ if FileType not in {BINARY_FILE_TYPE_BIN, "SEC_BIN", "RAW", "ASL", "ACPI"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_DXE_DEPEX or SectionType == BINARY_FILE_TYPE_SMM_DEPEX:
+ if FileType not in {BINARY_FILE_TYPE_DXE_DEPEX, "SEC_DXE_DEPEX", BINARY_FILE_TYPE_SMM_DEPEX}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_UI:
+ if FileType not in {BINARY_FILE_TYPE_UI, "SEC_UI"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "VERSION":
+ if FileType not in {"VERSION", "SEC_VERSION"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_PEI_DEPEX:
+ if FileType not in {BINARY_FILE_TYPE_PEI_DEPEX, "SEC_PEI_DEPEX"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == BINARY_FILE_TYPE_GUID:
+ if FileType not in {BINARY_FILE_TYPE_PE32, "SEC_GUID"}:
+ raise Warning(WarningString % FileType, self.FileName, self.CurrentLineNumber)
+
+ ## _GetRuleEncapsulationSection() method
+ #
+ # Get encapsulation section for Rule
+ #
+ # @param self The object pointer
+ # @param theRule for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def _GetRuleEncapsulationSection(self, theRule):
+ if self._IsKeyword("COMPRESS"):
+ Type = "PI_STD"
+ if self._IsKeyword("PI_STD") or self._IsKeyword("PI_NONE"):
+ Type = self._Token
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+
+ CompressSectionObj = CompressSection()
+
+ CompressSectionObj.CompType = Type
+ # Recursive sections...
+ while True:
+ IsEncapsulate = self._GetRuleEncapsulationSection(CompressSectionObj)
+ IsLeaf = self._GetEfiSection(CompressSectionObj)
+ if not IsEncapsulate and not IsLeaf:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+ theRule.SectionList.append(CompressSectionObj)
+
+ return True
+
+ elif self._IsKeyword("GUIDED"):
+ GuidValue = None
+ if self._GetNextGuid():
+ if self._Token in GlobalData.gGuidDict:
+ self._Token = GuidStructureStringToGuidString(GlobalData.gGuidDict[self._Token]).upper()
+ GuidValue = self._Token
+
+ if self._IsKeyword("$(NAMED_GUID)"):
+ GuidValue = self._Token
+
+ AttribDict = self._GetGuidAttrib()
+
+ if not self._IsToken("{"):
+ raise Warning.ExpectedCurlyOpen(self.FileName, self.CurrentLineNumber)
+ GuidSectionObj = GuidSection()
+ GuidSectionObj.NameGuid = GuidValue
+ GuidSectionObj.SectionType = "GUIDED"
+ GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
+ GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]
+ GuidSectionObj.ExtraHeaderSize = AttribDict["EXTRA_HEADER_SIZE"]
+
+ # Efi sections...
+ while True:
+ IsEncapsulate = self._GetRuleEncapsulationSection(GuidSectionObj)
+ IsLeaf = self._GetEfiSection(GuidSectionObj)
+ if not IsEncapsulate and not IsLeaf:
+ break
+
+ if not self._IsToken(T_CHAR_BRACE_R):
+ raise Warning.ExpectedCurlyClose(self.FileName, self.CurrentLineNumber)
+ theRule.SectionList.append(GuidSectionObj)
+
+ return True
+
+ return False
+
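+    # Illustrative encapsulation sections accepted by _GetRuleEncapsulationSection()
+    # above (hypothetical GUID value):
+    #
+    #   COMPRESS PI_STD {
+    #     GUIDED 11111111-2222-3333-4444-555555555555 PROCESSING_REQUIRED = TRUE {
+    #       PE32 PE32 $(INF_OUTPUT)/$(MODULE_NAME).efi
+    #     }
+    #   }
+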
+ ## _GetOptionRom() method
+ #
+ # Get OptionROM section contents and store its data into OptionROM list of self.Profile
+ #
+ # @param self The object pointer
+    #   @retval True        Successfully find an OptionROM
+    #   @retval False       Not able to find an OptionROM
+ #
+ def _GetOptionRom(self):
+ if not self._GetNextToken():
+ return False
+
+ S = self._Token.upper()
+ if S.startswith(TAB_SECTION_START) and not S.startswith("[OPTIONROM."):
+ self.SectionParser(S)
+ self._UndoToken()
+ return False
+
+ self._UndoToken()
+ if not self._IsToken("[OptionRom.", True):
+ raise Warning("Unknown Keyword '%s'" % self._Token, self.FileName, self.CurrentLineNumber)
+
+ OptRomName = self._GetUiName()
+
+ if not self._IsToken(TAB_SECTION_END):
+ raise Warning.ExpectedBracketClose(self.FileName, self.CurrentLineNumber)
+
+ OptRomObj = OPTIONROM(OptRomName)
+ self.Profile.OptRomDict[OptRomName] = OptRomObj
+
+ while True:
+ isInf = self._GetOptRomInfStatement(OptRomObj)
+ isFile = self._GetOptRomFileStatement(OptRomObj)
+ if not isInf and not isFile:
+ break
+
+ return True
+
+ ## _GetOptRomInfStatement() method
+ #
+ # Get INF statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom inf statement is got
+ # @retval True Successfully find inf statement
+ # @retval False Not able to find inf statement
+ #
+ def _GetOptRomInfStatement(self, Obj):
+ if not self._IsKeyword("INF"):
+ return False
+
+ ffsInf = OptRomInfStatement()
+ self._GetInfOptions(ffsInf)
+
+ if not self._GetNextToken():
+ raise Warning.Expected("INF file path", self.FileName, self.CurrentLineNumber)
+ ffsInf.InfFileName = self._Token
+ if ffsInf.InfFileName.replace(TAB_WORKSPACE, '').find('$') == -1:
+ #check for file path
+ ErrorCode, ErrorInfo = PathClass(NormPath(ffsInf.InfFileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
+ if ErrorCode != 0:
+ EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
+
+ NewFileName = ffsInf.InfFileName
+ if ffsInf.OverrideGuid:
+ NewFileName = ProcessDuplicatedInf(PathClass(ffsInf.InfFileName,GenFdsGlobalVariable.WorkSpaceDir), ffsInf.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir).Path
+
+        if NewFileName not in self.Profile.InfList:
+ self.Profile.InfList.append(NewFileName)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.InfFileLineList.append(FileLineTuple)
+ if ffsInf.UseArch:
+ if ffsInf.UseArch not in self.Profile.InfDict:
+ self.Profile.InfDict[ffsInf.UseArch] = [ffsInf.InfFileName]
+ else:
+ self.Profile.InfDict[ffsInf.UseArch].append(ffsInf.InfFileName)
+ else:
+ self.Profile.InfDict['ArchTBD'].append(ffsInf.InfFileName)
+
+
+ self._GetOptRomOverrides (ffsInf)
+
+ Obj.FfsList.append(ffsInf)
+ return True
+
+ ## _GetOptRomOverrides() method
+ #
+ # Get overrides for OptROM INF & FILE
+ #
+ # @param self The object pointer
+ # @param FfsInfObj for whom overrides is got
+ #
+ def _GetOptRomOverrides(self, Obj):
+ if self._IsToken('{'):
+ Overrides = OverrideAttribs()
+ while True:
+ if self._IsKeyword("PCI_VENDOR_ID"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex vendor id", self.FileName, self.CurrentLineNumber)
+ Overrides.PciVendorId = self._Token
+ continue
+
+ if self._IsKeyword("PCI_CLASS_CODE"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex class code", self.FileName, self.CurrentLineNumber)
+ Overrides.PciClassCode = self._Token
+ continue
+
+ if self._IsKeyword("PCI_DEVICE_ID"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ # Get a list of PCI IDs
+ Overrides.PciDeviceId = ""
+ while (self._GetNextHexNumber()):
+ Overrides.PciDeviceId = "{} {}".format(Overrides.PciDeviceId, self._Token)
+ if not Overrides.PciDeviceId:
+ raise Warning.Expected("one or more Hex device ids", self.FileName, self.CurrentLineNumber)
+ continue
+
+ if self._IsKeyword("PCI_REVISION"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextHexNumber():
+ raise Warning.Expected("Hex revision", self.FileName, self.CurrentLineNumber)
+ Overrides.PciRevision = self._Token
+ continue
+
+ if self._IsKeyword("PCI_COMPRESS"):
+ if not self._IsToken(TAB_EQUAL_SPLIT):
+ raise Warning.ExpectedEquals(self.FileName, self.CurrentLineNumber)
+ if not self._GetNextToken():
+ raise Warning.Expected("TRUE/FALSE for compress", self.FileName, self.CurrentLineNumber)
+ Overrides.NeedCompress = self._Token.upper() == 'TRUE'
+ continue
+
+ if self._IsToken(T_CHAR_BRACE_R):
+ break
+ else:
+ EdkLogger.error("FdfParser", FORMAT_INVALID, File=self.FileName, Line=self.CurrentLineNumber)
+
+ Obj.OverrideAttribs = Overrides
+
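+    # Illustrative override block accepted by _GetOptRomOverrides() above
+    # (hypothetical PCI IDs; PCI_DEVICE_ID takes one or more hex values):
+    #
+    #   {
+    #     PCI_VENDOR_ID  = 0x8086
+    #     PCI_CLASS_CODE = 0x020000
+    #     PCI_DEVICE_ID  = 0x1503 0x1533
+    #     PCI_REVISION   = 0x01
+    #     PCI_COMPRESS   = TRUE
+    #   }
+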
+ ## _GetOptRomFileStatement() method
+ #
+ # Get FILE statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom FILE statement is got
+ # @retval True Successfully find FILE statement
+ # @retval False Not able to find FILE statement
+ #
+ def _GetOptRomFileStatement(self, Obj):
+ if not self._IsKeyword("FILE"):
+ return False
+
+ FfsFileObj = OptRomFileStatement()
+
+ if not self._IsKeyword("EFI") and not self._IsKeyword(BINARY_FILE_TYPE_BIN):
+ raise Warning.Expected("Binary type (EFI/BIN)", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FileType = self._Token
+
+ if not self._GetNextToken():
+ raise Warning.Expected("File path", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FileName = self._Token
+ if FfsFileObj.FileName.replace(TAB_WORKSPACE, '').find('$') == -1:
+ #check for file path
+ ErrorCode, ErrorInfo = PathClass(NormPath(FfsFileObj.FileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()
+ if ErrorCode != 0:
+ EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
+
+ if FfsFileObj.FileType == 'EFI':
+ self._GetOptRomOverrides(FfsFileObj)
+
+ Obj.FfsList.append(FfsFileObj)
+
+ return True
+
+ ## _GetCapInFd() method
+ #
+ # Get Cap list contained in FD
+ #
+ # @param self The object pointer
+ # @param FdName FD name
+ # @retval CapList List of Capsule in FD
+ #
+ def _GetCapInFd (self, FdName):
+ CapList = []
+ if FdName.upper() in self.Profile.FdDict:
+ FdObj = self.Profile.FdDict[FdName.upper()]
+ for elementRegion in FdObj.RegionList:
+ if elementRegion.RegionType == 'CAPSULE':
+ for elementRegionData in elementRegion.RegionDataList:
+                        if elementRegionData is None or elementRegionData.endswith(".cap"):
+                            continue
+                        if elementRegionData.upper() not in CapList:
+                            CapList.append(elementRegionData.upper())
+ return CapList
+
+ ## _GetReferencedFdCapTuple() method
+ #
+ # Get FV and FD list referenced by a capsule image
+ #
+ # @param self The object pointer
+ # @param CapObj Capsule section to be searched
+ # @param RefFdList referenced FD by section
+ # @param RefFvList referenced FV by section
+ #
+    def _GetReferencedFdCapTuple(self, CapObj, RefFdList = None, RefFvList = None):
+        # Use fresh lists per call; mutable default arguments are shared across calls.
+        if RefFdList is None:
+            RefFdList = []
+        if RefFvList is None:
+            RefFvList = []
+ for CapsuleDataObj in CapObj.CapsuleDataList:
+ if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:
+ RefFvList.append (CapsuleDataObj.FvName.upper())
+ elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:
+ RefFdList.append (CapsuleDataObj.FdName.upper())
+ elif CapsuleDataObj.Ffs is not None:
+ if isinstance(CapsuleDataObj.Ffs, FileStatement):
+ if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
+ RefFvList.append(CapsuleDataObj.Ffs.FvName.upper())
+ elif CapsuleDataObj.Ffs.FdName is not None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
+ RefFdList.append(CapsuleDataObj.Ffs.FdName.upper())
+ else:
+ self._GetReferencedFdFvTupleFromSection(CapsuleDataObj.Ffs, RefFdList, RefFvList)
+
+ ## _GetFvInFd() method
+ #
+ # Get FV list contained in FD
+ #
+ # @param self The object pointer
+ # @param FdName FD name
+ # @retval FvList list of FV in FD
+ #
+ def _GetFvInFd (self, FdName):
+ FvList = []
+ if FdName.upper() in self.Profile.FdDict:
+ FdObj = self.Profile.FdDict[FdName.upper()]
+ for elementRegion in FdObj.RegionList:
+ if elementRegion.RegionType == BINARY_FILE_TYPE_FV:
+ for elementRegionData in elementRegion.RegionDataList:
+                        if elementRegionData is None or elementRegionData.endswith(".fv"):
+                            continue
+                        if elementRegionData.upper() not in FvList:
+                            FvList.append(elementRegionData.upper())
+ return FvList
+
+ ## _GetReferencedFdFvTuple() method
+ #
+ # Get FD and FV list referenced by a FFS file
+ #
+ # @param self The object pointer
+    #   @param  FvObj       FV object that contains the FFS files to be searched
+ # @param RefFdList referenced FD by section
+ # @param RefFvList referenced FV by section
+ #
+    def _GetReferencedFdFvTuple(self, FvObj, RefFdList = None, RefFvList = None):
+        # Use fresh lists per call; mutable default arguments are shared across calls.
+        if RefFdList is None:
+            RefFdList = []
+        if RefFvList is None:
+            RefFvList = []
+ for FfsObj in FvObj.FfsList:
+ if isinstance(FfsObj, FileStatement):
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
+ RefFvList.append(FfsObj.FvName.upper())
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
+ RefFdList.append(FfsObj.FdName.upper())
+ else:
+ self._GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
+
+ ## _GetReferencedFdFvTupleFromSection() method
+ #
+ # Get FD and FV list referenced by a FFS section
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param FdList referenced FD by section
+ # @param FvList referenced FV by section
+ #
+    def _GetReferencedFdFvTupleFromSection(self, FfsFile, FdList = None, FvList = None):
+        # Use fresh lists per call; mutable default arguments are shared across calls.
+        if FdList is None:
+            FdList = []
+        if FvList is None:
+            FvList = []
+ SectionStack = list(FfsFile.SectionList)
+ while SectionStack != []:
+ SectionObj = SectionStack.pop()
+ if isinstance(SectionObj, FvImageSection):
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
+ FvList.append(SectionObj.FvName.upper())
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ FvList.append(SectionObj.Fv.UiFvName.upper())
+ self._GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
+
+ if isinstance(SectionObj, CompressSection) or isinstance(SectionObj, GuidSection):
+ SectionStack.extend(SectionObj.SectionList)
+
+ ## CycleReferenceCheck() method
+ #
+ # Check whether cycle reference exists in FDF
+ #
+ # @param self The object pointer
+ # @retval True cycle reference exists
+ # @retval False Not exists cycle reference
+ #
+ def CycleReferenceCheck(self):
+ #
+ # Check the cycle between FV and FD image
+ #
+ MaxLength = len (self.Profile.FvDict)
+ for FvName in self.Profile.FvDict:
+ LogStr = "\nCycle Reference Checking for FV: %s\n" % FvName
+            RefFvStack = {FvName}
+ FdAnalyzedList = set()
+
+ Index = 0
+ while RefFvStack and Index < MaxLength:
+ Index = Index + 1
+ FvNameFromStack = RefFvStack.pop()
+ if FvNameFromStack.upper() in self.Profile.FvDict:
+ FvObj = self.Profile.FvDict[FvNameFromStack.upper()]
+ else:
+ continue
+
+ RefFdList = []
+ RefFvList = []
+ self._GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)
+
+ for RefFdName in RefFdList:
+ if RefFdName in FdAnalyzedList:
+ continue
+
+ LogStr += "FV %s contains FD %s\n" % (FvNameFromStack, RefFdName)
+ FvInFdList = self._GetFvInFd(RefFdName)
+ if FvInFdList != []:
+ for FvNameInFd in FvInFdList:
+ LogStr += "FD %s contains FV %s\n" % (RefFdName, FvNameInFd)
+ if FvNameInFd not in RefFvStack:
+ RefFvStack.add(FvNameInFd)
+
+ if FvName in RefFvStack or FvNameFromStack in RefFvStack:
+ EdkLogger.info(LogStr)
+ return True
+ FdAnalyzedList.add(RefFdName)
+
+ for RefFvName in RefFvList:
+ LogStr += "FV %s contains FV %s\n" % (FvNameFromStack, RefFvName)
+ if RefFvName not in RefFvStack:
+ RefFvStack.add(RefFvName)
+
+ if FvName in RefFvStack or FvNameFromStack in RefFvStack:
+ EdkLogger.info(LogStr)
+ return True
+
+ #
+ # Check the cycle between Capsule and FD image
+ #
+ MaxLength = len (self.Profile.CapsuleDict)
+ for CapName in self.Profile.CapsuleDict:
+ #
+ # Capsule image to be checked.
+ #
+ LogStr = "\n\n\nCycle Reference Checking for Capsule: %s\n" % CapName
+ RefCapStack = {CapName}
+ FdAnalyzedList = set()
+ FvAnalyzedList = set()
+
+ Index = 0
+ while RefCapStack and Index < MaxLength:
+ Index = Index + 1
+ CapNameFromStack = RefCapStack.pop()
+ if CapNameFromStack.upper() in self.Profile.CapsuleDict:
+ CapObj = self.Profile.CapsuleDict[CapNameFromStack.upper()]
+ else:
+ continue
+
+ RefFvList = []
+ RefFdList = []
+ self._GetReferencedFdCapTuple(CapObj, RefFdList, RefFvList)
+
+ FvListLength = 0
+ FdListLength = 0
+ while FvListLength < len (RefFvList) or FdListLength < len (RefFdList):
+ for RefFdName in RefFdList:
+ if RefFdName in FdAnalyzedList:
+ continue
+
+ LogStr += "Capsule %s contains FD %s\n" % (CapNameFromStack, RefFdName)
+ for CapNameInFd in self._GetCapInFd(RefFdName):
+ LogStr += "FD %s contains Capsule %s\n" % (RefFdName, CapNameInFd)
+ if CapNameInFd not in RefCapStack:
+                            RefCapStack.add(CapNameInFd)
+
+ if CapName in RefCapStack or CapNameFromStack in RefCapStack:
+ EdkLogger.info(LogStr)
+ return True
+
+ for FvNameInFd in self._GetFvInFd(RefFdName):
+ LogStr += "FD %s contains FV %s\n" % (RefFdName, FvNameInFd)
+ if FvNameInFd not in RefFvList:
+ RefFvList.append(FvNameInFd)
+
+ FdAnalyzedList.add(RefFdName)
+ #
+ # the number of the parsed FV and FD image
+ #
+ FvListLength = len (RefFvList)
+ FdListLength = len (RefFdList)
+ for RefFvName in RefFvList:
+ if RefFvName in FvAnalyzedList:
+ continue
+ LogStr += "Capsule %s contains FV %s\n" % (CapNameFromStack, RefFvName)
+ if RefFvName.upper() in self.Profile.FvDict:
+ FvObj = self.Profile.FvDict[RefFvName.upper()]
+ else:
+ continue
+ self._GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)
+ FvAnalyzedList.add(RefFvName)
+
+ return False
+
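+    # Example of a cycle CycleReferenceCheck() reports (hypothetical names):
+    # FV 'FVMAIN' contains FD 'BOOTFD' while FD 'BOOTFD' contains FV 'FVMAIN'.
+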
+ def GetAllIncludedFile (self):
+ global AllIncludeFileList
+ return AllIncludeFileList
+
+if __name__ == "__main__":
+ import sys
+ try:
+ test_file = sys.argv[1]
+    except IndexError:
+ print("Usage: %s filename" % sys.argv[0])
+ sys.exit(1)
+
+ parser = FdfParser(test_file)
+ try:
+ parser.ParseFile()
+ parser.CycleReferenceCheck()
+ except Warning as X:
+ print(str(X))
+ else:
+ print("Success!")
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Ffs.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Ffs.py
new file mode 100755
index 00000000..f55524c3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Ffs.py
@@ -0,0 +1,49 @@
+## @file
+# process FFS generation
+#
+# Copyright (c) 2007-2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from Common.DataType import *
+
+# mapping between FILE type in FDF and file type for GenFfs
+FdfFvFileTypeToFileType = {
+ SUP_MODULE_SEC : 'EFI_FV_FILETYPE_SECURITY_CORE',
+ SUP_MODULE_PEI_CORE : 'EFI_FV_FILETYPE_PEI_CORE',
+ SUP_MODULE_PEIM : 'EFI_FV_FILETYPE_PEIM',
+ SUP_MODULE_DXE_CORE : 'EFI_FV_FILETYPE_DXE_CORE',
+ 'FREEFORM' : 'EFI_FV_FILETYPE_FREEFORM',
+ 'DRIVER' : 'EFI_FV_FILETYPE_DRIVER',
+ 'APPLICATION' : 'EFI_FV_FILETYPE_APPLICATION',
+ 'FV_IMAGE' : 'EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE',
+ 'RAW' : 'EFI_FV_FILETYPE_RAW',
+ 'PEI_DXE_COMBO' : 'EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER',
+ 'SMM' : 'EFI_FV_FILETYPE_SMM',
+ SUP_MODULE_SMM_CORE : 'EFI_FV_FILETYPE_SMM_CORE',
+ SUP_MODULE_MM_STANDALONE : 'EFI_FV_FILETYPE_MM_STANDALONE',
+ SUP_MODULE_MM_CORE_STANDALONE : 'EFI_FV_FILETYPE_MM_CORE_STANDALONE'
+}
+
+# mapping between section type in FDF and file suffix
+SectionSuffix = {
+ BINARY_FILE_TYPE_PE32 : '.pe32',
+ BINARY_FILE_TYPE_PIC : '.pic',
+ BINARY_FILE_TYPE_TE : '.te',
+ BINARY_FILE_TYPE_DXE_DEPEX : '.dpx',
+ 'VERSION' : '.ver',
+ BINARY_FILE_TYPE_UI : '.ui',
+ 'COMPAT16' : '.com16',
+ 'RAW' : '.raw',
+ 'FREEFORM_SUBTYPE_GUID': '.guid',
+ 'SUBTYPE_GUID' : '.guid',
+ 'FV_IMAGE' : 'fv.sec',
+ 'COMPRESS' : '.com',
+ 'GUIDED' : '.guided',
+ BINARY_FILE_TYPE_PEI_DEPEX : '.dpx',
+ BINARY_FILE_TYPE_SMM_DEPEX : '.dpx'
+}
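+
+# Illustrative lookups: SectionSuffix[BINARY_FILE_TYPE_PE32] == '.pe32' and
+# FdfFvFileTypeToFileType['DRIVER'] == 'EFI_FV_FILETYPE_DRIVER'.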
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsFileStatement.py
new file mode 100755
index 00000000..7ab1507d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsFileStatement.py
@@ -0,0 +1,175 @@
+## @file
+# process FFS generation from FILE statement
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from io import BytesIO
+from struct import pack
+from CommonDataClass.FdfClass import FileStatementClassObject
+from Common import EdkLogger
+from Common.BuildToolError import GENFDS_ERROR
+from Common.Misc import GuidStructureByteArrayToGuidString, SaveFileOnChange
+import Common.LongFilePathOs as os
+from .GuidSection import GuidSection
+from .FvImageSection import FvImageSection
+from .Ffs import FdfFvFileTypeToFileType
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+import shutil
+
+## generate FFS from FILE
+#
+#
+class FileStatement (FileStatementClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FileStatementClassObject.__init__(self)
+ self.CurrentLineNum = None
+ self.CurrentLineContent = None
+ self.FileName = None
+ self.InfFileName = None
+ self.SubAlignment = None
+
+ ## GenFfs() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Dict dictionary contains macro and value pair
+ # @param FvChildAddr Array of the inside FvImage base address
+ # @param FvParentAddr Parent Fv base address
+ # @retval string Generated FFS file name
+ #
+    def GenFfs(self, Dict = None, FvChildAddr=None, FvParentAddr=None, IsMakefile=False, FvName=None):
+        # Use a fresh list per call; a mutable default list would be shared across calls.
+        if FvChildAddr is None:
+            FvChildAddr = []
+
+ if self.NameGuid and self.NameGuid.startswith('PCD('):
+ PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
+ if len(PcdValue) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
+ % (self.NameGuid))
+ if PcdValue.startswith('{'):
+ PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
+ RegistryGuidStr = PcdValue
+ if len(RegistryGuidStr) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
+ % (self.NameGuid))
+ self.NameGuid = RegistryGuidStr
+
+ Str = self.NameGuid
+ if FvName:
+ Str += FvName
+ OutputDir = os.path.join(GenFdsGlobalVariable.FfsDir, Str)
+ if os.path.exists(OutputDir):
+ shutil.rmtree(OutputDir)
+ if not os.path.exists(OutputDir):
+ os.makedirs(OutputDir)
+
+ if Dict is None:
+ Dict = {}
+
+ Dict.update(self.DefineVarDict)
+ SectionAlignments = None
+ if self.FvName:
+ Buffer = BytesIO()
+ if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
+ Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
+ FileName = Fv.AddToBuffer(Buffer)
+ SectionFiles = [FileName]
+
+ elif self.FdName:
+ if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))
+ Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
+ FileName = Fd.GenFd()
+ SectionFiles = [FileName]
+
+ elif self.FileName:
+ if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
+ if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
+ FileContent = BytesIO()
+ MaxAlignIndex = 0
+ MaxAlignValue = 1
+ for Index, File in enumerate(self.FileName):
+                        try:
+                            with open(File, 'rb') as f:
+                                Content = f.read()
+                        except:
+                            # GenFdsGlobalVariable.ErrorLogger raises a fatal error, aborting here
+                            GenFdsGlobalVariable.ErrorLogger("Error opening RAW file %s." % (File))
+ AlignValue = 1
+ if self.SubAlignment[Index]:
+ AlignValue = GenFdsGlobalVariable.GetAlignment(self.SubAlignment[Index])
+ if AlignValue > MaxAlignValue:
+ MaxAlignIndex = Index
+ MaxAlignValue = AlignValue
+ FileContent.write(Content)
+ if len(FileContent.getvalue()) % AlignValue != 0:
+ Size = AlignValue - len(FileContent.getvalue()) % AlignValue
+ for i in range(0, Size):
+ FileContent.write(pack('B', 0xFF))
+
+ if FileContent.getvalue() != b'':
+ OutputRAWFile = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid, self.NameGuid + '.raw')
+ SaveFileOnChange(OutputRAWFile, FileContent.getvalue(), True)
+ self.FileName = OutputRAWFile
+ self.SubAlignment = self.SubAlignment[MaxAlignIndex]
+
+ if self.Alignment and self.SubAlignment:
+ if GenFdsGlobalVariable.GetAlignment (self.Alignment) < GenFdsGlobalVariable.GetAlignment (self.SubAlignment):
+ self.Alignment = self.SubAlignment
+ elif self.SubAlignment:
+ self.Alignment = self.SubAlignment
+
+ self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
+            # Replace $(SPACE) with a real space
+ self.FileName = self.FileName.replace('$(SPACE)', ' ')
+ SectionFiles = [GenFdsGlobalVariable.MacroExtend(self.FileName, Dict)]
+
+ else:
+ SectionFiles = []
+ Index = 0
+ SectionAlignments = []
+ for section in self.SectionList:
+ Index = Index + 1
+ SecIndex = '%d' %Index
+ # process the inside FvImage from FvSection or GuidSection
+ if FvChildAddr != []:
+ if isinstance(section, FvImageSection):
+ section.FvAddr = FvChildAddr.pop(0)
+ elif isinstance(section, GuidSection):
+ section.FvAddr = FvChildAddr
+ if FvParentAddr and isinstance(section, GuidSection):
+ section.FvParentAddr = FvParentAddr
+
+ if self.KeepReloc == False:
+ section.KeepReloc = False
+ sectList, align = section.GenSection(OutputDir, self.NameGuid, SecIndex, self.KeyStringList, None, Dict)
+ if sectList != []:
+ for sect in sectList:
+ SectionFiles.append(sect)
+ SectionAlignments.append(align)
+
+ #
+ # Prepare the parameter
+ #
+ FfsFileOutput = os.path.join(OutputDir, self.NameGuid + '.ffs')
+ GenFdsGlobalVariable.GenerateFfs(FfsFileOutput, SectionFiles,
+ FdfFvFileTypeToFileType.get(self.FvFileType),
+ self.NameGuid,
+ Fixed=self.Fixed,
+ CheckSum=self.CheckSum,
+ Align=self.Alignment,
+ SectionAlign=SectionAlignments
+ )
+
+ return FfsFileOutput
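+
+# Note: FileStatement only collects section files and alignment parameters from
+# the FDF FILE statement; the actual FFS generation is delegated to
+# GenFdsGlobalVariable.GenerateFfs (see GenFfs above).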
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsInfStatement.py
new file mode 100755
index 00000000..b7b94231
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -0,0 +1,1128 @@
+## @file
+# process FFS generation from INF statement
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2014-2016 Hewlett-Packard Development Company, L.P.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Rule
+import Common.LongFilePathOs as os
+from io import BytesIO
+from struct import *
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from .Ffs import SectionSuffix,FdfFvFileTypeToFileType
+import subprocess
+import sys
+from . import Section
+from . import RuleSimpleFile
+from . import RuleComplexFile
+from CommonDataClass.FdfClass import FfsInfStatementClassObject
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common.DataType import SUP_MODULE_USER_DEFINED
+from Common.DataType import SUP_MODULE_HOST_APPLICATION
+from Common.StringUtils import *
+from Common.Misc import PathClass
+from Common.Misc import GuidStructureByteArrayToGuidString
+from Common.Misc import ProcessDuplicatedInf
+from Common.Misc import GetVariableOffset
+from Common import EdkLogger
+from Common.BuildToolError import *
+from .GuidSection import GuidSection
+from .FvImageSection import FvImageSection
+from Common.Misc import PeImageClass
+from AutoGen.GenDepex import DependencyExpression
+from PatchPcdValue.PatchPcdValue import PatchBinaryFile
+from Common.LongFilePathSupport import CopyLongFilePath
+from Common.LongFilePathSupport import OpenLongFilePath as open
+import Common.GlobalData as GlobalData
+from .DepexSection import DepexSection
+from Common.Misc import SaveFileOnChange
+from Common.Expression import *
+from Common.DataType import *
+
+## generate FFS from INF
+#
+#
+class FfsInfStatement(FfsInfStatementClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FfsInfStatementClassObject.__init__(self)
+ self.TargetOverrideList = []
+ self.ShadowFromInfFile = None
+ self.KeepRelocFromRule = None
+ self.InDsc = True
+ self.OptRomDefs = {}
+ self.PiSpecVersion = '0x00000000'
+ self.InfModule = None
+ self.FinalTargetSuffixMap = {}
+ self.CurrentLineNum = None
+ self.CurrentLineContent = None
+ self.FileName = None
+ self.InfFileName = None
+ self.OverrideGuid = None
+ self.PatchedBinFile = ''
+ self.MacroDict = {}
+ self.Depex = False
+
+ ## GetFinalTargetSuffixMap() method
+ #
+ # Get final build target list
+ def GetFinalTargetSuffixMap(self):
+ if not self.InfModule or not self.CurrentArch:
+ return []
+ if not self.FinalTargetSuffixMap:
+ FinalBuildTargetList = GenFdsGlobalVariable.GetModuleCodaTargetList(self.InfModule, self.CurrentArch)
+ for File in FinalBuildTargetList:
+ self.FinalTargetSuffixMap.setdefault(os.path.splitext(File)[1], []).append(File)
+
+ # Check if current INF module has DEPEX
+ if '.depex' not in self.FinalTargetSuffixMap and self.InfModule.ModuleType != SUP_MODULE_USER_DEFINED and self.InfModule.ModuleType != SUP_MODULE_HOST_APPLICATION \
+ and not self.InfModule.DxsFile and not self.InfModule.LibraryClass:
+ ModuleType = self.InfModule.ModuleType
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+
+ if ModuleType != SUP_MODULE_USER_DEFINED and ModuleType != SUP_MODULE_HOST_APPLICATION:
+ for LibraryClass in PlatformDataBase.LibraryClasses.GetKeys():
+ if LibraryClass.startswith("NULL") and PlatformDataBase.LibraryClasses[LibraryClass, ModuleType]:
+ self.InfModule.LibraryClasses[LibraryClass] = PlatformDataBase.LibraryClasses[LibraryClass, ModuleType]
+
+ StrModule = str(self.InfModule)
+ PlatformModule = None
+ if StrModule in PlatformDataBase.Modules:
+ PlatformModule = PlatformDataBase.Modules[StrModule]
+ for LibraryClass in PlatformModule.LibraryClasses:
+ if LibraryClass.startswith("NULL"):
+ self.InfModule.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass]
+
+ DependencyList = [self.InfModule]
+ LibraryInstance = {}
+ DepexList = []
+ while len(DependencyList) > 0:
+ Module = DependencyList.pop(0)
+ if not Module:
+ continue
+ for Dep in Module.Depex[self.CurrentArch, ModuleType]:
+ if DepexList != []:
+ DepexList.append('AND')
+ DepexList.append('(')
+ DepexList.extend(Dep)
+ if DepexList[-1] == 'END': # no need of a END at this time
+ DepexList.pop()
+ DepexList.append(')')
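+                    # Illustrative shape after two dependencies (hypothetical GUID
+                    # names): ['(', 'gGuidA', ')', 'AND', '(', 'gGuidB', ')']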
+ if 'BEFORE' in DepexList or 'AFTER' in DepexList:
+ break
+ for LibName in Module.LibraryClasses:
+ if LibName in LibraryInstance:
+ continue
+ if PlatformModule and LibName in PlatformModule.LibraryClasses:
+ LibraryPath = PlatformModule.LibraryClasses[LibName]
+ else:
+ LibraryPath = PlatformDataBase.LibraryClasses[LibName, ModuleType]
+ if not LibraryPath:
+ LibraryPath = Module.LibraryClasses[LibName]
+ if not LibraryPath:
+ continue
+ LibraryModule = GenFdsGlobalVariable.WorkSpace.BuildObject[LibraryPath, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ LibraryInstance[LibName] = LibraryModule
+ DependencyList.append(LibraryModule)
+ if DepexList:
+ Dpx = DependencyExpression(DepexList, ModuleType, True)
+ if len(Dpx.PostfixNotation) != 0:
+ # It means this module has DEPEX
+ self.FinalTargetSuffixMap['.depex'] = [os.path.join(self.EfiOutputPath, self.BaseName) + '.depex']
+ return self.FinalTargetSuffixMap
+
+ ## __InfParse() method
+ #
+ # Parse inf file to get module information
+ #
+ # @param self The object pointer
+ # @param Dict dictionary contains macro and value pair
+ #
+ def __InfParse__(self, Dict = None, IsGenFfs=False):
+
+        GenFdsGlobalVariable.VerboseLogger(" Begin parsing INF file: %s" % self.InfFileName)
+
+ self.InfFileName = self.InfFileName.replace('$(WORKSPACE)', '')
+ if len(self.InfFileName) > 1 and self.InfFileName[0] == '\\' and self.InfFileName[1] == '\\':
+ pass
+ elif self.InfFileName[0] == '\\' or self.InfFileName[0] == '/' :
+ self.InfFileName = self.InfFileName[1:]
+
+ if self.InfFileName.find('$') == -1:
+ InfPath = NormPath(self.InfFileName)
+ if not os.path.exists(InfPath):
+ InfPath = GenFdsGlobalVariable.ReplaceWorkspaceMacro(InfPath)
+ if not os.path.exists(InfPath):
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Non-existant Module %s !" % (self.InfFileName))
+
+ self.CurrentArch = self.GetCurrentArch()
+ #
+ # Get the InfClass object
+ #
+
+ PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
+ ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
+ if ErrorCode != 0:
+ EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
+
+ #
+ # Cache lower case version of INF path before processing FILE_GUID override
+ #
+ InfLowerPath = str(PathClassObj).lower()
+ if self.OverrideGuid:
+ PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
+ if self.CurrentArch is not None:
+
+ Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ #
+ # Set Ffs BaseName, ModuleGuid, ModuleType, Version, OutputPath
+ #
+ self.BaseName = Inf.BaseName
+ self.ModuleGuid = Inf.Guid
+ self.ModuleType = Inf.ModuleType
+ if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
+ self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
+ if Inf.AutoGenVersion < 0x00010005:
+ self.ModuleType = Inf.ComponentType
+ self.VersionString = Inf.Version
+ self.BinFileList = Inf.Binaries
+ self.SourceFileList = Inf.Sources
+ if self.KeepReloc is None and Inf.Shadow:
+ self.ShadowFromInfFile = Inf.Shadow
+
+ else:
+ Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ self.BaseName = Inf.BaseName
+ self.ModuleGuid = Inf.Guid
+ self.ModuleType = Inf.ModuleType
+ if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
+ self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
+ self.VersionString = Inf.Version
+ self.BinFileList = Inf.Binaries
+ self.SourceFileList = Inf.Sources
+ if self.BinFileList == []:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "INF %s specified in FDF could not be found in build ARCH %s!" \
+ % (self.InfFileName, GenFdsGlobalVariable.ArchList))
+
+ if self.OverrideGuid:
+ self.ModuleGuid = self.OverrideGuid
+
+ if len(self.SourceFileList) != 0 and not self.InDsc:
+ EdkLogger.warn("GenFds", GENFDS_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % (self.InfFileName))
+
+ if self.ModuleType == SUP_MODULE_SMM_CORE and int(self.PiSpecVersion, 16) < 0x0001000A:
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.InfFileName)
+
+ if self.ModuleType == SUP_MODULE_MM_CORE_STANDALONE and int(self.PiSpecVersion, 16) < 0x00010032:
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
+
+ if Inf._Defs is not None and len(Inf._Defs) > 0:
+ self.OptRomDefs.update(Inf._Defs)
+
+ self.PatchPcds = []
+ InfPcds = Inf.Pcds
+ Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ FdfPcdDict = GenFdsGlobalVariable.FdfParser.Profile.PcdDict
+ PlatformPcds = Platform.Pcds
+
+        # Workaround: both the build and GenFds tools convert the workspace path to lower case,
+        # but the INF file path in the FDF and DSC files may contain mixed-case characters.
+        # Convert the path to lower case to check whether PCD values are overridden by the DSC.
+ DscModules = {}
+ for DscModule in Platform.Modules:
+ DscModules[str(DscModule).lower()] = Platform.Modules[DscModule]
+ for PcdKey in InfPcds:
+ Pcd = InfPcds[PcdKey]
+ if not hasattr(Pcd, 'Offset'):
+ continue
+ if Pcd.Type != TAB_PCDS_PATCHABLE_IN_MODULE:
+ continue
+ # Override Patchable PCD value by the value from DSC
+ PatchPcd = None
+ if InfLowerPath in DscModules and PcdKey in DscModules[InfLowerPath].Pcds:
+ PatchPcd = DscModules[InfLowerPath].Pcds[PcdKey]
+ elif PcdKey in Platform.Pcds:
+ PatchPcd = Platform.Pcds[PcdKey]
+ DscOverride = False
+ if PatchPcd and Pcd.Type == PatchPcd.Type:
+ DefaultValue = PatchPcd.DefaultValue
+ DscOverride = True
+
+ # Override Patchable PCD value by the value from FDF
+ FdfOverride = False
+ if PcdKey in FdfPcdDict:
+ DefaultValue = FdfPcdDict[PcdKey]
+ FdfOverride = True
+
+ # Override Patchable PCD value by the value from Build Option
+ BuildOptionOverride = False
+ if GlobalData.BuildOptionPcd:
+ for pcd in GlobalData.BuildOptionPcd:
+ if PcdKey == (pcd[1], pcd[0]):
+ if pcd[2]:
+ continue
+ DefaultValue = pcd[3]
+ BuildOptionOverride = True
+ break
+
+ if not DscOverride and not FdfOverride and not BuildOptionOverride:
+ continue
+
+ # Support Flexible PCD format
+ if DefaultValue:
+ try:
+ DefaultValue = ValueExpressionEx(DefaultValue, Pcd.DatumType, Platform._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'PCD [%s.%s] Value "%s"' %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, DefaultValue), File=self.InfFileName)
+
+ if Pcd.InfDefaultValue:
+ try:
+ Pcd.InfDefaultValue = ValueExpressionEx(Pcd.InfDefaultValue, Pcd.DatumType, Platform._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'PCD [%s.%s] Value "%s"' %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DefaultValue), File=self.InfFileName)
+
+ # Check the values; if they are equal, there is no need to patch
+ if Pcd.DatumType == TAB_VOID:
+ if Pcd.InfDefaultValue == DefaultValue or not DefaultValue:
+ continue
+ # Get the string size from the FDF or DSC value
+ if DefaultValue[0] == 'L':
+ # Strip the L"" wrapper; the trailing '\0' must still be counted
+ MaxDatumSize = str((len(DefaultValue) - 2) * 2)
+ elif DefaultValue[0] == '{':
+ MaxDatumSize = str(len(DefaultValue.split(',')))
+ else:
+ MaxDatumSize = str(len(DefaultValue) - 1)
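+ # Size rules illustrated (hypothetical values, for illustration only):
+ # L"Boot" -> (7 - 2) * 2 = 10 bytes (5 UCS-2 code units, including the appended '\0')
+ # {0x1, 0x2} -> 2 comma-separated items -> 2 bytes
+ # "Boot" -> 6 - 1 = 5 bytes (4 ASCII chars plus '\0')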
+ if DscOverride:
+ Pcd.MaxDatumSize = PatchPcd.MaxDatumSize
+ # If the maximum size is not defined in the DSC, try to get the current size from the INF
+ if not Pcd.MaxDatumSize:
+ Pcd.MaxDatumSize = str(len(Pcd.InfDefaultValue.split(',')))
+ else:
+ Base1 = Base2 = 10
+ if Pcd.InfDefaultValue.upper().startswith('0X'):
+ Base1 = 16
+ if DefaultValue.upper().startswith('0X'):
+ Base2 = 16
+ try:
+ PcdValueInImg = int(Pcd.InfDefaultValue, Base1)
+ PcdValueInDscOrFdf = int(DefaultValue, Base2)
+ if PcdValueInImg == PcdValueInDscOrFdf:
+ continue
+ except:
+ continue
+ # Check the Pcd size and data type
+ if Pcd.DatumType == TAB_VOID:
+ if int(MaxDatumSize) > int(Pcd.MaxDatumSize):
+ EdkLogger.error("GenFds", GENFDS_ERROR, "The size of VOID* type PCD '%s.%s' exceeds its maximum size %d bytes." \
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, int(MaxDatumSize) - int(Pcd.MaxDatumSize)))
+ else:
+ if PcdValueInDscOrFdf > MAX_VAL_TYPE[Pcd.DatumType] \
+ or PcdValueInImg > MAX_VAL_TYPE[Pcd.DatumType]:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "The size of %s type PCD '%s.%s' doesn't match its data type." \
+ % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ self.PatchPcds.append((Pcd, DefaultValue))
+
+ self.InfModule = Inf
+ self.PcdIsDriver = Inf.PcdIsDriver
+ self.IsBinaryModule = Inf.IsBinaryModule
+ if len(Inf.Depex.data) > 0 and len(Inf.DepexExpression.data) > 0:
+ self.Depex = True
+
+ GenFdsGlobalVariable.VerboseLogger("BaseName : %s" % self.BaseName)
+ GenFdsGlobalVariable.VerboseLogger("ModuleGuid : %s" % self.ModuleGuid)
+ GenFdsGlobalVariable.VerboseLogger("ModuleType : %s" % self.ModuleType)
+ GenFdsGlobalVariable.VerboseLogger("VersionString : %s" % self.VersionString)
+ GenFdsGlobalVariable.VerboseLogger("InfFileName :%s" % self.InfFileName)
+
+ #
+ # Set OutputPath = ${WorkSpace}\Build\Fv\Ffs\${ModuleGuid}${ModuleName}\
+ #
+ if IsGenFfs:
+ Rule = self.__GetRule__()
+ if GlobalData.gGuidPatternEnd.match(Rule.NameGuid):
+ self.ModuleGuid = Rule.NameGuid
+ self.OutputPath = os.path.join(GenFdsGlobalVariable.FfsDir, \
+ self.ModuleGuid + self.BaseName)
+ if not os.path.exists(self.OutputPath) :
+ os.makedirs(self.OutputPath)
+
+ self.EfiOutputPath, self.EfiDebugPath = self.__GetEFIOutPutPath__()
+ GenFdsGlobalVariable.VerboseLogger( "ModuelEFIPath: " + self.EfiOutputPath)
+
+ ## PatchEfiFile
+ #
+ # Patch EFI file with patch PCD
+ #
+ # @param EfiFile: EFI file that needs to be patched.
+ # @retval: Full path of the patched EFI file: self.OutputPath + EfiFile base name.
+ # If the passed-in file does not need patching, it is returned as-is.
+ #
+ def PatchEfiFile(self, EfiFile, FileType):
+ #
+ # If the module does not have any patches, then return path to input file
+ #
+ if not self.PatchPcds:
+ return EfiFile
+
+ #
+ # Only patch the file if FileType is PE32 or ModuleType is USER_DEFINED or HOST_APPLICATION
+ #
+ if FileType != BINARY_FILE_TYPE_PE32 and self.ModuleType != SUP_MODULE_USER_DEFINED and self.ModuleType != SUP_MODULE_HOST_APPLICATION:
+ return EfiFile
+
+ #
+ # Generate path to patched output file
+ #
+ Basename = os.path.basename(EfiFile)
+ Output = os.path.normpath (os.path.join(self.OutputPath, Basename))
+
+ #
+ # If this file has already been patched, then return the path to the patched file
+ #
+ if self.PatchedBinFile == Output:
+ return Output
+
+ #
+ # If a different file from the same module has already been patched, then generate an error
+ #
+ if self.PatchedBinFile:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ 'Only one binary file can be patched:\n'
+ ' a binary file has been patched: %s\n'
+ ' current file: %s' % (self.PatchedBinFile, EfiFile),
+ File=self.InfFileName)
+
+ #
+ # Copy unpatched file contents to output file location to perform patching
+ #
+ CopyLongFilePath(EfiFile, Output)
+
+ #
+ # Apply patches to patched output file
+ #
+ for Pcd, Value in self.PatchPcds:
+ RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Value, Pcd.MaxDatumSize)
+ if RetVal:
+ EdkLogger.error("GenFds", GENFDS_ERROR, RetStr, File=self.InfFileName)
+
+ #
+ # Save the path of the patched output file
+ #
+ self.PatchedBinFile = Output
+
+ #
+ # Return path to patched output file
+ #
+ return Output
+
+ ## GenFfs() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Dict dictionary containing macro/value pairs
+ # @param FvChildAddr list of base addresses of nested FvImages
+ # @param FvParentAddr Parent Fv base address
+ # @retval string Generated FFS file name
+ #
+ def GenFfs(self, Dict = None, FvChildAddr = None, FvParentAddr=None, IsMakefile=False, FvName=None):
+ #
+ # Parse the INF file to get module-related information
+ #
+ if Dict is None:
+ Dict = {}
+ if FvChildAddr is None:
+ # Avoid the shared-mutable-default-argument pitfall
+ FvChildAddr = []
+ self.__InfParse__(Dict, IsGenFfs=True)
+ Arch = self.GetCurrentArch()
+ SrcFile = mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName)
+ DestFile = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
+
+ SrcFileDir = "."
+ SrcPath = os.path.dirname(SrcFile)
+ SrcFileName = os.path.basename(SrcFile)
+ SrcFileBase, SrcFileExt = os.path.splitext(SrcFileName)
+ DestPath = os.path.dirname(DestFile)
+ DestFileName = os.path.basename(DestFile)
+ DestFileBase, DestFileExt = os.path.splitext(DestFileName)
+ self.MacroDict = {
+ # source file
+ "${src}" : SrcFile,
+ "${s_path}" : SrcPath,
+ "${s_dir}" : SrcFileDir,
+ "${s_name}" : SrcFileName,
+ "${s_base}" : SrcFileBase,
+ "${s_ext}" : SrcFileExt,
+ # destination file
+ "${dst}" : DestFile,
+ "${d_path}" : DestPath,
+ "${d_name}" : DestFileName,
+ "${d_base}" : DestFileBase,
+ "${d_ext}" : DestFileExt
+ }
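+ # Hypothetical expansion for an INF at 'MdeModulePkg/Foo/Foo.inf' (illustrative only):
+ # ${src} -> <WORKSPACE>/MdeModulePkg/Foo/Foo.inf, ${s_base} -> 'Foo', ${s_ext} -> '.inf',
+ # ${dst} -> <OutputPath>/<ModuleGuid>.ffs, ${d_base} -> '<ModuleGuid>', ${d_ext} -> '.ffs'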
+ #
+ # Allow a binary-type module to omit the override rule in the FDF file.
+ #
+ if len(self.BinFileList) > 0:
+ if self.Rule is None or self.Rule == "":
+ self.Rule = "BINARY"
+
+ if not IsMakefile and GenFdsGlobalVariable.EnableGenfdsMultiThread and self.Rule != 'BINARY':
+ IsMakefile = True
+ #
+ # Get the rule of how to generate Ffs file
+ #
+ Rule = self.__GetRule__()
+ GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
+ #
+ # Convert Fv File Type for PI1.1 SMM driver.
+ #
+ if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) >= 0x0001000A:
+ if Rule.FvFileType == 'DRIVER':
+ Rule.FvFileType = 'SMM'
+ #
+ # Framework SMM Driver has no SMM FV file type
+ #
+ if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) < 0x0001000A:
+ if Rule.FvFileType == 'SMM' or Rule.FvFileType == SUP_MODULE_SMM_CORE:
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM or SMM_CORE FV file type", File=self.InfFileName)
+ #
+ # For a rule that has only a simple file
+ #
+ MakefilePath = None
+ if self.IsBinaryModule:
+ IsMakefile = False
+ if IsMakefile:
+ PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
+ if self.OverrideGuid:
+ PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
+ MakefilePath = PathClassObj.Path, Arch
+ if isinstance (Rule, RuleSimpleFile.RuleSimpleFile):
+ SectionOutputList = self.__GenSimpleFileSection__(Rule, IsMakefile=IsMakefile)
+ FfsOutput = self.__GenSimpleFileFfs__(Rule, SectionOutputList, MakefilePath=MakefilePath)
+ return FfsOutput
+ #
+ # For a rule that has a complex file
+ #
+ elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
+ InputSectList, InputSectAlignments = self.__GenComplexFileSection__(Rule, FvChildAddr, FvParentAddr, IsMakefile=IsMakefile)
+ FfsOutput = self.__GenComplexFileFfs__(Rule, InputSectList, InputSectAlignments, MakefilePath=MakefilePath)
+ return FfsOutput
+
+ ## __ExtendMacro__() method
+ #
+ # Replace macro with its value
+ #
+ # @param self The object pointer
+ # @param String The string to be replaced
+ # @retval string Macro replaced string
+ #
+ def __ExtendMacro__ (self, String):
+ MacroDict = {
+ '$(INF_OUTPUT)' : self.EfiOutputPath,
+ '$(MODULE_NAME)' : self.BaseName,
+ '$(BUILD_NUMBER)': self.BuildNum,
+ '$(INF_VERSION)' : self.VersionString,
+ '$(NAMED_GUID)' : self.ModuleGuid
+ }
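+ # e.g. (hypothetical) '$(NAMED_GUID).bin' would expand to '<ModuleGuid>.bin'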
+ String = GenFdsGlobalVariable.MacroExtend(String, MacroDict)
+ String = GenFdsGlobalVariable.MacroExtend(String, self.MacroDict)
+ return String
+
+ ## __GetRule__() method
+ #
+ # Get correct rule for generating FFS for this INF
+ #
+ # @param self The object pointer
+ # @retval Rule Rule object
+ #
+ def __GetRule__ (self) :
+ CurrentArchList = []
+ if self.CurrentArch is None:
+ CurrentArchList = ['common']
+ else:
+ CurrentArchList.append(self.CurrentArch)
+
+ for CurrentArch in CurrentArchList:
+ RuleName = 'RULE' + \
+ '.' + \
+ CurrentArch.upper() + \
+ '.' + \
+ self.ModuleType.upper()
+ if self.Rule is not None:
+ RuleName = RuleName + \
+ '.' + \
+ self.Rule.upper()
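+ # e.g. (hypothetical) Arch 'X64', ModuleType 'DXE_DRIVER', Rule 'BINARY'
+ # yields the lookup key 'RULE.X64.DXE_DRIVER.BINARY'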
+
+ Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
+ if Rule is not None:
+ GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
+ return Rule
+
+ RuleName = 'RULE' + \
+ '.' + \
+ TAB_COMMON + \
+ '.' + \
+ self.ModuleType.upper()
+
+ if self.Rule is not None:
+ RuleName = RuleName + \
+ '.' + \
+ self.Rule.upper()
+
+ GenFdsGlobalVariable.VerboseLogger ('Trying to apply common rule %s for INF %s' % (RuleName, self.InfFileName))
+
+ Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
+ if Rule is not None:
+ GenFdsGlobalVariable.VerboseLogger("Found rule: " + RuleName)
+ return Rule
+
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Cannot find common rule %s for INF %s" \
+ % (RuleName, self.InfFileName))
+
+ ## __GetPlatformArchList__() method
+ #
+ # Get the list of Arches this INF is built under
+ #
+ # @param self The object pointer
+ # @retval list Arch list
+ #
+ def __GetPlatformArchList__(self):
+
+ InfFileKey = os.path.normpath(mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName))
+ DscArchList = []
+ for Arch in GenFdsGlobalVariable.ArchList :
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ if PlatformDataBase is not None:
+ if InfFileKey in PlatformDataBase.Modules:
+ DscArchList.append (Arch)
+ else:
+ #
+ # BaseTools supports building the same module more than once; a module whose FILE_GUID is
+ # overridden gets the file name FILE_GUIDmodule.inf, and PlatformDataBase.Modules uses that
+ # name as its key, while the path (self.MetaFile.Path) is the real path
+ #
+ for key in PlatformDataBase.Modules:
+ if InfFileKey == str((PlatformDataBase.Modules[key]).MetaFile.Path):
+ DscArchList.append (Arch)
+ break
+
+ return DscArchList
+
+ ## GetCurrentArch() method
+ #
+ # Get the Arch of the module from this INF that is to be placed into flash
+ #
+ # @param self The object pointer
+ # @retval string Arch of the module
+ #
+ def GetCurrentArch(self) :
+
+ TargetArchList = GenFdsGlobalVariable.ArchList
+
+ PlatformArchList = self.__GetPlatformArchList__()
+
+ CurArchList = TargetArchList
+ if PlatformArchList != []:
+ CurArchList = list(set (TargetArchList) & set (PlatformArchList))
+ GenFdsGlobalVariable.VerboseLogger ("Valid target architecture(s) is : " + " ".join(CurArchList))
+
+ ArchList = []
+ if self.KeyStringList != []:
+ for Key in self.KeyStringList:
+ Key = GenFdsGlobalVariable.MacroExtend(Key)
+ Target, Tag, Arch = Key.split('_')
+ if Arch in CurArchList:
+ ArchList.append(Arch)
+ if Target not in self.TargetOverrideList:
+ self.TargetOverrideList.append(Target)
+ else:
+ ArchList = CurArchList
+
+ UseArchList = TargetArchList
+ if self.UseArch is not None:
+ UseArchList = []
+ UseArchList.append(self.UseArch)
+ ArchList = list(set (UseArchList) & set (ArchList))
+
+ self.InfFileName = NormPath(self.InfFileName)
+ if len(PlatformArchList) == 0:
+ self.InDsc = False
+ PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
+ ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
+ if ErrorCode != 0:
+ EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
+ if len(ArchList) == 1:
+ Arch = ArchList[0]
+ return Arch
+ elif len(ArchList) > 1:
+ if len(PlatformArchList) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "GenFds command line option has multiple ARCHs %s. Not able to determine which ARCH is valid for Module %s !" % (str(ArchList), self.InfFileName))
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Module built under multiple ARCHs %s. Not able to determine which output to put into flash for Module %s !" % (str(ArchList), self.InfFileName))
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s appears under ARCH %s in platform %s, but current deduced ARCH is %s, so NO build output could be put into flash." \
+ % (self.InfFileName, str(PlatformArchList), GenFdsGlobalVariable.ActivePlatform, str(set (UseArchList) & set (TargetArchList))))
+
+ ## __GetEFIOutPutPath__() method
+ #
+ # Get the output path for generated files
+ #
+ # @param self The object pointer
+ # @retval string Path that output files from this INF go to
+ #
+ def __GetEFIOutPutPath__(self):
+ Arch = ''
+ OutputPath = ''
+ DebugPath = ''
+ (ModulePath, FileName) = os.path.split(self.InfFileName)
+ Index = FileName.rfind('.')
+ FileName = FileName[0:Index]
+ if self.OverrideGuid:
+ FileName = self.OverrideGuid
+ Arch = "NoneArch"
+ if self.CurrentArch is not None:
+ Arch = self.CurrentArch
+
+ OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
+ Arch,
+ ModulePath,
+ FileName,
+ 'OUTPUT'
+ )
+ DebugPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
+ Arch,
+ ModulePath,
+ FileName,
+ 'DEBUG'
+ )
+ OutputPath = os.path.realpath(OutputPath)
+ DebugPath = os.path.realpath(DebugPath)
+ return OutputPath, DebugPath
+
+ ## __GenSimpleFileSection__() method
+ #
+ # Generate a section from a specified file name or from a list of files with a given extension
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @retval string File name of the generated section file
+ #
+ def __GenSimpleFileSection__(self, Rule, IsMakefile = False):
+ #
+ # Prepare the parameters for GenSection
+ #
+ FileList = []
+ OutputFileList = []
+ GenSecInputFile = None
+ if Rule.FileName is not None:
+ GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
+ if os.path.isabs(GenSecInputFile):
+ GenSecInputFile = os.path.normpath(GenSecInputFile)
+ else:
+ GenSecInputFile = os.path.normpath(os.path.join(self.EfiOutputPath, GenSecInputFile))
+ else:
+ FileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
+
+ Index = 1
+ SectionType = Rule.SectionType
+ #
+ # Convert Fv Section Type for PI1.1 SMM driver.
+ #
+ if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) >= 0x0001000A:
+ if SectionType == BINARY_FILE_TYPE_DXE_DEPEX:
+ SectionType = BINARY_FILE_TYPE_SMM_DEPEX
+ #
+ # Framework SMM Driver has no SMM_DEPEX section type
+ #
+ if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) < 0x0001000A:
+ if SectionType == BINARY_FILE_TYPE_SMM_DEPEX:
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
+ NoStrip = True
+ if self.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM):
+ if self.KeepReloc is not None:
+ NoStrip = self.KeepReloc
+ elif Rule.KeepReloc is not None:
+ NoStrip = Rule.KeepReloc
+ elif self.ShadowFromInfFile is not None:
+ NoStrip = self.ShadowFromInfFile
+
+ if FileList != [] :
+ for File in FileList:
+
+ SecNum = '%d' %Index
+ GenSecOutputFile= self.__ExtendMacro__(Rule.NameGuid) + \
+ SectionSuffix[SectionType] + SUP_MODULE_SEC + SecNum
+ Index = Index + 1
+ OutputFile = os.path.join(self.OutputPath, GenSecOutputFile)
+ File = GenFdsGlobalVariable.MacroExtend(File, {}, self.CurrentArch)  # 'Dict' was undefined in this scope
+
+ # Get the PE section alignment when Alignment is set to 'Auto'
+ if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
+ ImageObj = PeImageClass (File)
+ if ImageObj.SectionAlignment < 0x400:
+ self.Alignment = str (ImageObj.SectionAlignment)
+ elif ImageObj.SectionAlignment < 0x100000:
+ self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
+ else:
+ self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
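+ # e.g. (hypothetical) a PE section alignment of 0x1000 maps to '4K' and 0x200000 to '2M'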
+
+ if not NoStrip:
+ FileBeforeStrip = os.path.join(self.OutputPath, self.BaseName + '.reloc')  # 'ModuleName' was undefined in this scope
+ if not os.path.exists(FileBeforeStrip) or \
+ (os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
+ CopyLongFilePath(File, FileBeforeStrip)
+ StrippedFile = os.path.join(self.OutputPath, self.BaseName + '.stripped')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ StrippedFile,
+ [File],
+ Strip=True,
+ IsMakefile=IsMakefile
+ )
+ File = StrippedFile
+
+ if SectionType == BINARY_FILE_TYPE_TE:
+ TeFile = os.path.join( self.OutputPath, self.ModuleGuid + 'Te.raw')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ TeFile,
+ [File],
+ Type='te',
+ IsMakefile=IsMakefile
+ )
+ File = TeFile
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [File], Section.Section.SectionType[SectionType], IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+ else:
+ SecNum = '%d' %Index
+ GenSecOutputFile= self.__ExtendMacro__(Rule.NameGuid) + \
+ SectionSuffix[SectionType] + SUP_MODULE_SEC + SecNum
+ OutputFile = os.path.join(self.OutputPath, GenSecOutputFile)
+ GenSecInputFile = GenFdsGlobalVariable.MacroExtend(GenSecInputFile, {}, self.CurrentArch)  # 'Dict' was undefined in this scope
+
+ # Get the PE section alignment when Alignment is set to 'Auto'
+ if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
+ ImageObj = PeImageClass (GenSecInputFile)
+ if ImageObj.SectionAlignment < 0x400:
+ self.Alignment = str (ImageObj.SectionAlignment)
+ elif ImageObj.SectionAlignment < 0x100000:
+ self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
+ else:
+ self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
+
+ if not NoStrip:
+ FileBeforeStrip = os.path.join(self.OutputPath, self.BaseName + '.reloc')  # 'ModuleName' was undefined in this scope
+ if not os.path.exists(FileBeforeStrip) or \
+ (os.path.getmtime(GenSecInputFile) > os.path.getmtime(FileBeforeStrip)):
+ CopyLongFilePath(GenSecInputFile, FileBeforeStrip)
+
+ StrippedFile = os.path.join(self.OutputPath, self.BaseName + '.stripped')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ StrippedFile,
+ [GenSecInputFile],
+ Strip=True,
+ IsMakefile=IsMakefile
+ )
+ GenSecInputFile = StrippedFile
+
+ if SectionType == BINARY_FILE_TYPE_TE:
+ TeFile = os.path.join( self.OutputPath, self.ModuleGuid + 'Te.raw')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ TeFile,
+ [GenSecInputFile],
+ Type='te',
+ IsMakefile=IsMakefile
+ )
+ GenSecInputFile = TeFile
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [GenSecInputFile], Section.Section.SectionType[SectionType], IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ return OutputFileList
+
+ ## __GenSimpleFileFfs__() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @param InputFileList The output file list from GenSection
+ # @retval string Generated FFS file name
+ #
+ def __GenSimpleFileFfs__(self, Rule, InputFileList, MakefilePath = None):
+ FfsOutput = os.path.join(self.OutputPath, self.__ExtendMacro__(Rule.NameGuid) + '.ffs')
+
+ GenFdsGlobalVariable.VerboseLogger(self.__ExtendMacro__(Rule.NameGuid))
+ InputSection = []
+ SectionAlignments = []
+ for InputFile in InputFileList:
+ InputSection.append(InputFile)
+ SectionAlignments.append(Rule.SectAlignment)
+
+ if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
+ PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
+ if len(PcdValue) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
+ % (Rule.NameGuid))
+ if PcdValue.startswith('{'):
+ PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
+ RegistryGuidStr = PcdValue
+ if len(RegistryGuidStr) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
+ % (Rule.NameGuid))
+ self.ModuleGuid = RegistryGuidStr
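+ # Example (hypothetical): Rule.NameGuid 'PCD(gEfiTokenSpaceGuid.PcdFooFile)' is resolved
+ # via GetPcdValue; a '{...}' byte-array value is converted to the registry format, e.g.
+ # '8c3d856a-9be6-468e-850a-24f7a8d38e08', and then used as the FFS file GUID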
+
+ GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputSection,
+ FdfFvFileTypeToFileType[Rule.FvFileType],
+ self.ModuleGuid, Fixed=Rule.Fixed,
+ CheckSum=Rule.CheckSum, Align=Rule.Alignment,
+ SectionAlign=SectionAlignments,
+ MakefilePath=MakefilePath
+ )
+ return FfsOutput
+
+ ## __GenComplexFileSection__() method
+ #
+ # Generate sections from the section list in Rule
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @param FvChildAddr Array of the inside FvImage base address
+ # @param FvParentAddr Parent Fv base address
+ # @retval string File name of the generated section file
+ #
+ def __GenComplexFileSection__(self, Rule, FvChildAddr, FvParentAddr, IsMakefile = False):
+ if self.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_MM_CORE_STANDALONE):
+ if Rule.KeepReloc is not None:
+ self.KeepRelocFromRule = Rule.KeepReloc
+ SectFiles = []
+ SectAlignments = []
+ Index = 1
+ HasGeneratedFlag = False
+ if self.PcdIsDriver == 'PEI_PCD_DRIVER':
+ if self.IsBinaryModule:
+ PcdExDbFileName = os.path.join(GenFdsGlobalVariable.FvDir, "PEIPcdDataBase.raw")
+ else:
+ PcdExDbFileName = os.path.join(self.EfiOutputPath, "PEIPcdDataBase.raw")
+ PcdExDbSecName = os.path.join(self.OutputPath, "PEIPcdDataBaseSec.raw")
+ GenFdsGlobalVariable.GenerateSection(PcdExDbSecName,
+ [PcdExDbFileName],
+ "EFI_SECTION_RAW",
+ IsMakefile = IsMakefile
+ )
+ SectFiles.append(PcdExDbSecName)
+ SectAlignments.append(None)
+ elif self.PcdIsDriver == 'DXE_PCD_DRIVER':
+ if self.IsBinaryModule:
+ PcdExDbFileName = os.path.join(GenFdsGlobalVariable.FvDir, "DXEPcdDataBase.raw")
+ else:
+ PcdExDbFileName = os.path.join(self.EfiOutputPath, "DXEPcdDataBase.raw")
+ PcdExDbSecName = os.path.join(self.OutputPath, "DXEPcdDataBaseSec.raw")
+ GenFdsGlobalVariable.GenerateSection(PcdExDbSecName,
+ [PcdExDbFileName],
+ "EFI_SECTION_RAW",
+ IsMakefile = IsMakefile
+ )
+ SectFiles.append(PcdExDbSecName)
+ SectAlignments.append(None)
+ for Sect in Rule.SectionList:
+ SecIndex = '%d' %Index
+ SectList = []
+ #
+ # Convert Fv Section Type for PI1.1 SMM driver.
+ #
+ if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) >= 0x0001000A:
+ if Sect.SectionType == BINARY_FILE_TYPE_DXE_DEPEX:
+ Sect.SectionType = BINARY_FILE_TYPE_SMM_DEPEX
+ #
+ # Framework SMM Driver has no SMM_DEPEX section type
+ #
+ if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) < 0x0001000A:
+ if Sect.SectionType == BINARY_FILE_TYPE_SMM_DEPEX:
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
+ #
+ # Process nested FvImages from FvSection or GuidSection
+ #
+ if FvChildAddr != []:
+ if isinstance(Sect, FvImageSection):
+ Sect.FvAddr = FvChildAddr.pop(0)
+ elif isinstance(Sect, GuidSection):
+ Sect.FvAddr = FvChildAddr
+ if FvParentAddr is not None and isinstance(Sect, GuidSection):
+ Sect.FvParentAddr = FvParentAddr
+
+ if Rule.KeyStringList != []:
+ SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, Rule.KeyStringList, self, IsMakefile = IsMakefile)
+ else :
+ SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, self.KeyStringList, self, IsMakefile = IsMakefile)
+
+ if not HasGeneratedFlag:
+ UniVfrOffsetFileSection = ""
+ ModuleFileName = mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName)
+ InfData = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(ModuleFileName), self.CurrentArch]
+ #
+ # Search the source list in InfData to see whether any .vfr files exist.
+ #
+ VfrUniBaseName = {}
+ VfrUniOffsetList = []
+ for SourceFile in InfData.Sources:
+ if SourceFile.Type.upper() == ".VFR" :
+ #
+ # search the .map file to find the offset of vfr binary in the PE32+/TE file.
+ #
+ VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
+ if SourceFile.Type.upper() == ".UNI" :
+ #
+ # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
+ #
+ VfrUniBaseName["UniOffsetName"] = (self.BaseName + "Strings")
+
+
+ if len(VfrUniBaseName) > 0:
+ if IsMakefile:
+ if InfData.BuildType != 'UEFI_HII':
+ UniVfrOffsetFileName = os.path.join(self.OutputPath, self.BaseName + '.offset')
+ UniVfrOffsetFileSection = os.path.join(self.OutputPath, self.BaseName + 'Offset' + '.raw')
+ UniVfrOffsetFileNameList = []
+ UniVfrOffsetFileNameList.append(UniVfrOffsetFileName)
+ TrimCmd = "Trim --Vfr-Uni-Offset -o %s --ModuleName=%s --DebugDir=%s " % (UniVfrOffsetFileName, self.BaseName, self.EfiDebugPath)
+ GenFdsGlobalVariable.SecCmdList.append(TrimCmd)
+ GenFdsGlobalVariable.GenerateSection(UniVfrOffsetFileSection,
+ [UniVfrOffsetFileName],
+ "EFI_SECTION_RAW",
+ IsMakefile = True
+ )
+ else:
+ VfrUniOffsetList = self.__GetBuildOutputMapFileVfrUniInfo(VfrUniBaseName)
+ #
+ # Generate the raw data for the RAW section
+ #
+ if VfrUniOffsetList:
+ UniVfrOffsetFileName = os.path.join(self.OutputPath, self.BaseName + '.offset')
+ UniVfrOffsetFileSection = os.path.join(self.OutputPath, self.BaseName + 'Offset' + '.raw')
+ FfsInfStatement.__GenUniVfrOffsetFile (VfrUniOffsetList, UniVfrOffsetFileName)
+ UniVfrOffsetFileNameList = []
+ UniVfrOffsetFileNameList.append(UniVfrOffsetFileName)
+ """Call GenSection"""
+
+ GenFdsGlobalVariable.GenerateSection(UniVfrOffsetFileSection,
+ UniVfrOffsetFileNameList,
+ "EFI_SECTION_RAW"
+ )
+ #os.remove(UniVfrOffsetFileName)
+ if UniVfrOffsetFileSection:
+ SectList.append(UniVfrOffsetFileSection)
+ HasGeneratedFlag = True
+
+ for SecName in SectList :
+ SectFiles.append(SecName)
+ SectAlignments.append(Align)
+ Index = Index + 1
+ return SectFiles, SectAlignments
+
+ ## __GenComplexFileFfs__() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @param InputFileList The output file list from GenSection
+ # @retval string Generated FFS file name
+ #
+ def __GenComplexFileFfs__(self, Rule, InputFile, Alignments, MakefilePath = None):
+
+ if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
+ PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
+ if len(PcdValue) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
+ % (Rule.NameGuid))
+ if PcdValue.startswith('{'):
+ PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
+ RegistryGuidStr = PcdValue
+ if len(RegistryGuidStr) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
+ % (Rule.NameGuid))
+ self.ModuleGuid = RegistryGuidStr
+
+ FfsOutput = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
+ GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputFile,
+ FdfFvFileTypeToFileType[Rule.FvFileType],
+ self.ModuleGuid, Fixed=Rule.Fixed,
+ CheckSum=Rule.CheckSum, Align=Rule.Alignment,
+ SectionAlign=Alignments,
+ MakefilePath=MakefilePath
+ )
+ return FfsOutput
+
+ ## __GetBuildOutputMapFileVfrUniInfo() method
+ #
+ # Find the offsets of the UNI/VFR objects in the EFI image file.
+ #
+ # @param self The object pointer
+ # @param VfrUniBaseName A dictionary containing the UNI/VFR object names.
+ # @retval RetValue A list containing the offsets of the UNI/VFR objects.
+ #
+ def __GetBuildOutputMapFileVfrUniInfo(self, VfrUniBaseName):
+ MapFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".map")
+ EfiFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".efi")
+ return GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
+
+ ## __GenUniVfrOffsetFile() method
+ #
+ # Generate the offset file for a module that contains VFR or UNI files.
+ #
+ # @param VfrUniOffsetList A list contain the VFR/UNI offsets in the EFI image file.
+ # @param UniVfrOffsetFileName The output offset file name.
+ #
+ @staticmethod
+ def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):
+
+ # Use a BytesIO instance to cache data
+ fStringIO = BytesIO()
+
+ for Item in VfrUniOffsetList:
+ if (Item[0].find("Strings") != -1):
+ #
+ # UNI offset in image.
+ # GUID + Offset
+ # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
+ #
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
+ fStringIO.write(UniGuid)
+ UniValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (UniValue)
+ else:
+ #
+ # VFR binary offset in image.
+ # GUID + Offset
+ # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
+ #
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
+ fStringIO.write(VfrGuid)
+ VfrValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (VfrValue)
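+ # Each record written above is 24 bytes: a 16-byte packed GUID followed by the 64-bit
+ # little-endian offset, e.g. (hypothetical) Item ("FooStrings", "0x1A40") produces
+ # UniGuid + pack('Q', 0x1A40)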
+
+ #
+ # write data into file.
+ #
+ try:
+ SaveFileOnChange(UniVfrOffsetFileName, fStringIO.getvalue())
+ except:
+ EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Failed to write data to file %s; please check whether the file is locked or in use by another application." % UniVfrOffsetFileName, None)
+
+ fStringIO.close ()
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fv.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fv.py
new file mode 100755
index 00000000..798eb1f5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Fv.py
@@ -0,0 +1,431 @@
+## @file
+# process FV generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import subprocess
+from io import BytesIO
+from struct import *
+from . import FfsFileStatement
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from Common.Misc import SaveFileOnChange, PackGUID
+from Common.LongFilePathSupport import CopyLongFilePath
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.DataType import *
+
+ FV_UI_EXT_ENTRY_GUID = 'A67DF1FA-8DE8-4E98-AF09-4BDF2EFFBC7C'
+
+## generate FV
+#
+#
+class FV (object):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self, Name=None):
+ self.UiFvName = Name
+ self.CreateFileName = None
+ self.BlockSizeList = []
+ self.DefineVarDict = {}
+ self.SetVarDict = {}
+ self.FvAlignment = None
+ self.FvAttributeDict = {}
+ self.FvNameGuid = None
+ self.FvNameString = None
+ self.AprioriSectionList = []
+ self.FfsList = []
+ self.BsBaseAddress = None
+ self.RtBaseAddress = None
+ self.FvInfFile = None
+ self.FvAddressFile = None
+ self.BaseAddress = None
+ self.InfFileName = None
+ self.FvAddressFileName = None
+ self.CapsuleName = None
+ self.FvBaseAddress = None
+ self.FvForceRebase = None
+ self.FvRegionInFD = None
+ self.UsedSizeEnable = False
+ self.FvExtEntryTypeValue = []
+ self.FvExtEntryType = []
+ self.FvExtEntryData = []
+ ## AddToBuffer()
+ #
+ # Generate Fv and add it to the Buffer
+ #
+ # @param self The object pointer
+ # @param Buffer The buffer the generated FV data will be put into
+ # @param BaseAddress base address of FV
+ # @param BlockSize block size of FV
+ # @param BlockNum How many blocks in FV
+ # @param ErasePolarity Flash erase polarity
+ # @param MacroDict dictionary of macro/value pairs
+ # @retval string Generated FV file path
+ #
+ def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', MacroDict = None, Flag=False):
+ if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFdsGlobalVariable.ImageBinDict:
+ return GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv']
+ if MacroDict is None:
+ MacroDict = {}
+
+ #
+ # Check whether the FV in the Capsule is also in an FD flash region.
+ # If so, report an error: an FV that is both in a Capsule image and in an FD flash region is not supported.
+ #
+ if self.CapsuleName is not None:
+ for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
+ for RegionObj in FdObj.RegionList:
+ if RegionObj.RegionType == BINARY_FILE_TYPE_FV:
+ for RegionData in RegionObj.RegionDataList:
+ if RegionData.endswith(".fv"):
+ continue
+ elif RegionData.upper() + 'fv' in GenFdsGlobalVariable.ImageBinDict:
+ continue
+ elif self.UiFvName.upper() == RegionData.upper():
+ GenFdsGlobalVariable.ErrorLogger("Capsule %s in FD region can't contain a FV %s in FD region." % (self.CapsuleName, self.UiFvName.upper()))
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV" %self.UiFvName)
+ GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
+ FFSGuid = None
+
+ if self.FvBaseAddress is not None:
+ BaseAddress = self.FvBaseAddress
+ if not Flag:
+ self._InitializeInf(BaseAddress, BlockSize, BlockNum, ErasePloarity)
+ #
+ # First Process the Apriori section
+ #
+ MacroDict.update(self.DefineVarDict)
+
+ GenFdsGlobalVariable.VerboseLogger('First generate the Apriori file!')
+ FfsFileList = []
+ for AprSection in self.AprioriSectionList:
+ FileName = AprSection.GenFfs (self.UiFvName, MacroDict, IsMakefile=Flag)
+ FfsFileList.append(FileName)
+ # Add Apriori file name to Inf file
+ if not Flag:
+ self.FvInfFile.append("EFI_FILE_NAME = " + \
+ FileName + \
+ TAB_LINE_BREAK)
+
+ # Process Modules in FfsList
+ for FfsFile in self.FfsList:
+ if Flag:
+ if isinstance(FfsFile, FfsFileStatement.FileStatement):
+ continue
+ if GenFdsGlobalVariable.EnableGenfdsMultiThread and GenFdsGlobalVariable.ModuleFile and GenFdsGlobalVariable.ModuleFile.Path.find(os.path.normpath(FfsFile.InfFileName)) == -1:
+ continue
+ FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
+ FfsFileList.append(FileName)
+ if not Flag:
+ self.FvInfFile.append("EFI_FILE_NAME = " + \
+ FileName + \
+ TAB_LINE_BREAK)
+ if not Flag:
+ FvInfFile = ''.join(self.FvInfFile)
+ SaveFileOnChange(self.InfFileName, FvInfFile, False)
+ #
+ # Call GenFv tool
+ #
+ FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
+ FvOutputFile = FvOutputFile + '.Fv'
+ # BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
+ if self.CreateFileName is not None:
+ FvOutputFile = self.CreateFileName
+
+ if Flag:
+ GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
+ return FvOutputFile
+
+ FvInfoFileName = os.path.join(GenFdsGlobalVariable.FfsDir, self.UiFvName + '.inf')
+ if not Flag:
+ CopyLongFilePath(GenFdsGlobalVariable.FvAddressFileName, FvInfoFileName)
+ OrigFvInfo = None
+ if os.path.exists (FvInfoFileName):
+ OrigFvInfo = open(FvInfoFileName, 'r').read()
+ if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
+ FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
+ GenFdsGlobalVariable.GenerateFirmwareVolume(
+ FvOutputFile,
+ [self.InfFileName],
+ AddressFile=FvInfoFileName,
+ FfsList=FfsFileList,
+ ForceRebase=self.FvForceRebase,
+ FileSystemGuid=FFSGuid
+ )
+
+ NewFvInfo = None
+ if os.path.exists (FvInfoFileName):
+ NewFvInfo = open(FvInfoFileName, 'r').read()
+ if NewFvInfo is not None and NewFvInfo != OrigFvInfo:
+ FvChildAddr = []
+ AddFileObj = open(FvInfoFileName, 'r')
+ AddrStrings = AddFileObj.readlines()
+ AddrKeyFound = False
+ for AddrString in AddrStrings:
+ if AddrKeyFound:
+ # Get the base address of the nested FvImage
+ FvChildAddr.append (AddrString)
+ elif AddrString.find ("[FV_BASE_ADDRESS]") != -1:
+ AddrKeyFound = True
+ AddFileObj.close()
+
+ if FvChildAddr != []:
+ # Update Ffs again
+ for FfsFile in self.FfsList:
+ FileName = FfsFile.GenFfs(MacroDict, FvChildAddr, BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
+
+ if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
+ FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
+ # Update GenFv again
+ GenFdsGlobalVariable.GenerateFirmwareVolume(
+ FvOutputFile,
+ [self.InfFileName],
+ AddressFile=FvInfoFileName,
+ FfsList=FfsFileList,
+ ForceRebase=self.FvForceRebase,
+ FileSystemGuid=FFSGuid
+ )
+
+ #
+ # Write the Fv contents to Buffer
+ #
+ if os.path.isfile(FvOutputFile) and os.path.getsize(FvOutputFile) >= 0x48:
+ FvFileObj = open(FvOutputFile, 'rb')
+ # The PI FV header is 0x48 bytes
+ FvHeaderBuffer = FvFileObj.read(0x48)
+ Signature = FvHeaderBuffer[0x28:0x2C]  # 4-byte '_FVH' signature at offset 0x28
+ if Signature and Signature.startswith(b'_FVH'):
+ GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName)
+ GenFdsGlobalVariable.SharpCounter = 0
+
+ FvFileObj.seek(0)
+ Buffer.write(FvFileObj.read())
+ # Extract log2(FV alignment) from the FV header attributes
+ FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E:0x2F]) & 0x1F)
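+ # Bits 16..20 of the FV header Attributes field hold log2(alignment); byte 0x2E & 0x1F
+ # recovers it, e.g. a (hypothetical) value of 0x0C gives 1 << 12 = 4K alignment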
+ if FvAlignmentValue >= 0x400:
+ if FvAlignmentValue >= 0x100000:
+ if FvAlignmentValue >= 0x1000000:
+ # The max alignment supported by FFS is 16M.
+ self.FvAlignment = "16M"
+ else:
+ self.FvAlignment = str(FvAlignmentValue // 0x100000) + "M"
+ else:
+ self.FvAlignment = str(FvAlignmentValue // 0x400) + "K"
+ else:
+ # FvAlignmentValue is less than 1K
+ self.FvAlignment = str (FvAlignmentValue)
+ FvFileObj.close()
+ GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
+ GenFdsGlobalVariable.LargeFileInFvFlags.pop()
+ else:
+ GenFdsGlobalVariable.ErrorLogger("Invalid FV file %s." % self.UiFvName)
+ else:
+ GenFdsGlobalVariable.ErrorLogger("Failed to generate %s FV file." %self.UiFvName)
+ return FvOutputFile
+
+ ## _GetBlockSize()
+ #
+ # Calculate FV's block size
+ # Inherit block size from FD if no block size specified in FV
+ #
+ def _GetBlockSize(self):
+ if self.BlockSizeList:
+ return True
+
+ for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
+ for RegionObj in FdObj.RegionList:
+ if RegionObj.RegionType != BINARY_FILE_TYPE_FV:
+ continue
+ for RegionData in RegionObj.RegionDataList:
+ #
+ # Found the FD and region that contain this FV
+ #
+ if self.UiFvName.upper() == RegionData.upper():
+ RegionObj.BlockInfoOfRegion(FdObj.BlockSizeList, self)
+ if self.BlockSizeList:
+ return True
+ return False
+
+ ## _InitializeInf()
+ #
+ # Initialize the inf file to create FV
+ #
+ # @param self The object pointer
+ # @param BaseAddress base address of FV
+ # @param BlockSize block size of FV
+ # @param BlockNum How many blocks in FV
+ # @param ErasePolarity Flash erase polarity
+ #
+ def _InitializeInf (self, BaseAddress = None, BlockSize= None, BlockNum = None, ErasePloarity='1'):
+ #
+ # Create FV inf file
+ #
+ self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
+ self.UiFvName + '.inf')
+ self.FvInfFile = []
+
+ #
+ # Add [Options]
+ #
+ self.FvInfFile.append("[options]" + TAB_LINE_BREAK)
+ if BaseAddress is not None:
+ self.FvInfFile.append("EFI_BASE_ADDRESS = " + \
+ BaseAddress + \
+ TAB_LINE_BREAK)
+
+ if BlockSize is not None:
+ self.FvInfFile.append("EFI_BLOCK_SIZE = " + \
+ '0x%X' %BlockSize + \
+ TAB_LINE_BREAK)
+ if BlockNum is not None:
+ self.FvInfFile.append("EFI_NUM_BLOCKS = " + \
+ ' 0x%X' %BlockNum + \
+ TAB_LINE_BREAK)
+ else:
+ if self.BlockSizeList == []:
+ if not self._GetBlockSize():
+ # Set the default block size to 1
+ self.FvInfFile.append("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)
+
+ for BlockSize in self.BlockSizeList:
+ if BlockSize[0] is not None:
+ self.FvInfFile.append("EFI_BLOCK_SIZE = " + \
+ '0x%X' %BlockSize[0] + \
+ TAB_LINE_BREAK)
+
+ if BlockSize[1] is not None:
+ self.FvInfFile.append("EFI_NUM_BLOCKS = " + \
+ ' 0x%X' %BlockSize[1] + \
+ TAB_LINE_BREAK)
+
+ if self.BsBaseAddress is not None:
+ self.FvInfFile.append('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
+ '0x%X' %self.BsBaseAddress)
+ if self.RtBaseAddress is not None:
+ self.FvInfFile.append('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
+ '0x%X' %self.RtBaseAddress)
+ #
+ # Add attribute
+ #
+ self.FvInfFile.append("[attributes]" + TAB_LINE_BREAK)
+
+ self.FvInfFile.append("EFI_ERASE_POLARITY = " + \
+ ' %s' %ErasePloarity + \
+ TAB_LINE_BREAK)
+ if self.FvAttributeDict is not None:
+ for FvAttribute in self.FvAttributeDict.keys():
+ if FvAttribute == "FvUsedSizeEnable":
+ if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1'):
+ self.UsedSizeEnable = True
+ continue
+ self.FvInfFile.append("EFI_" + \
+ FvAttribute + \
+ ' = ' + \
+ self.FvAttributeDict[FvAttribute] + \
+ TAB_LINE_BREAK )
+ if self.FvAlignment is not None:
+ self.FvInfFile.append("EFI_FVB2_ALIGNMENT_" + \
+ self.FvAlignment.strip() + \
+ " = TRUE" + \
+ TAB_LINE_BREAK)
+
+ #
+ # Generate FV extension header file
+ #
+ if not self.FvNameGuid:
+ if len(self.FvExtEntryType) > 0 or self.UsedSizeEnable:
+ GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
+ else:
+ TotalSize = 16 + 4
+ Buffer = bytearray()
+ if self.UsedSizeEnable:
+ TotalSize += (4 + 4)
+ ## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03
+ #typedef struct
+ # {
+ # EFI_FIRMWARE_VOLUME_EXT_ENTRY Hdr;
+ # UINT32 UsedSize;
+ # } EFI_FIRMWARE_VOLUME_EXT_ENTRY_USED_SIZE_TYPE;
+ Buffer += pack('HHL', 8, 3, 0)
+
+ if self.FvNameString == 'TRUE':
+ #
+ # Create EXT entry for FV UI name
+ # This GUID is used: A67DF1FA-8DE8-4E98-AF09-4BDF2EFFBC7C
+ #
+ FvUiLen = len(self.UiFvName)
+ TotalSize += (FvUiLen + 16 + 4)
+ Guid = FV_UI_EXT_ENTRY_GUID.split('-')
+ #
+ # Layout:
+ # EFI_FIRMWARE_VOLUME_EXT_ENTRY: size 4
+ # GUID: size 16
+ # FV UI name
+ #
+ Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
+ + PackGUID(Guid)
+ + self.UiFvName.encode('utf-8'))
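+ # e.g. (hypothetical) UiFvName 'FVMAIN': entry size = 6 + 16 + 4 = 26, type 0x0002,
+ # followed by the FV_UI_EXT_ENTRY_GUID and the ASCII name bytes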
+
+ for Index in range (0, len(self.FvExtEntryType)):
+ if self.FvExtEntryType[Index] == 'FILE':
+ # check if the path is absolute or relative
+ if os.path.isabs(self.FvExtEntryData[Index]):
+ FileFullPath = os.path.normpath(self.FvExtEntryData[Index])
+ else:
+ FileFullPath = os.path.normpath(os.path.join(GenFdsGlobalVariable.WorkSpaceDir, self.FvExtEntryData[Index]))
+ # check if the file path exists or not
+ if not os.path.isfile(FileFullPath):
+ GenFdsGlobalVariable.ErrorLogger("Error opening FV Extension Header Entry file %s." % (self.FvExtEntryData[Index]))
+ FvExtFile = open (FileFullPath, 'rb')
+ FvExtFile.seek(0, 2)
+ Size = FvExtFile.tell()
+ if Size >= 0x10000:
+ GenFdsGlobalVariable.ErrorLogger("The size of FV Extension Header Entry file %s exceeds 0x10000." % (self.FvExtEntryData[Index]))
+ TotalSize += (Size + 4)
+ FvExtFile.seek(0)
+ Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
+ Buffer += FvExtFile.read()
+ FvExtFile.close()
+ if self.FvExtEntryType[Index] == 'DATA':
+ ByteList = self.FvExtEntryData[Index].split(',')
+ Size = len (ByteList)
+ if Size >= 0x10000:
+ GenFdsGlobalVariable.ErrorLogger("The size of FV Extension Header Entry data %s exceeds 0x10000." % (self.FvExtEntryData[Index]))
+ TotalSize += (Size + 4)
+ Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
+ for Index1 in range (0, Size):
+ Buffer += pack('B', int(ByteList[Index1], 16))
+
+ Guid = self.FvNameGuid.split('-')
+ Buffer = PackGUID(Guid) + pack('=L', TotalSize) + Buffer
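+ # Resulting layout (EFI_FIRMWARE_VOLUME_EXT_HEADER): the 16-byte FvName GUID, the
+ # 32-bit total ExtHeaderSize, then the packed entries built above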
+
+ #
+ # Generate FV extension header file if the total size is not zero
+ #
+ if TotalSize > 0:
+ FvExtHeaderFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.ext')
+ FvExtHeaderFile = BytesIO()
+ FvExtHeaderFile.write(Buffer)
+ Changed = SaveFileOnChange(FvExtHeaderFileName, FvExtHeaderFile.getvalue(), True)
+ FvExtHeaderFile.close()
+ if Changed:
+ if os.path.exists (self.InfFileName):
+ os.remove (self.InfFileName)
+ self.FvInfFile.append("EFI_FV_EXT_HEADER_FILE_NAME = " + \
+ FvExtHeaderFileName + \
+ TAB_LINE_BREAK)
+
+ #
+ # Add [Files]
+ #
+ self.FvInfFile.append("[files]" + TAB_LINE_BREAK)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FvImageSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FvImageSection.py
new file mode 100755
index 00000000..8590c64b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/FvImageSection.py
@@ -0,0 +1,158 @@
+## @file
+# process FV image section generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Section
+from io import BytesIO
+from .Ffs import SectionSuffix
+import subprocess
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+import Common.LongFilePathOs as os
+from CommonDataClass.FdfClass import FvImageSectionClassObject
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.DataType import *
+
+## generate FV image section
+#
+#
+class FvImageSection(FvImageSectionClassObject):
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FvImageSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # Generate FV image section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+ # @param Dict dictionary contains macro and its value
+ # @retval tuple (Generated file name, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = None, IsMakefile = False):
+
+ OutputFileList = []
+ if Dict is None:
+ Dict = {}
+ if self.FvFileType is not None:
+ FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FvFileType, self.FvFileExtension)
+ if IsSect :
+ return FileList, self.Alignment
+
+ Num = SecNum
+
+ MaxFvAlignment = 0
+ for FvFileName in FileList:
+ FvAlignmentValue = 0
+ if os.path.isfile(FvFileName):
+ FvFileObj = open (FvFileName, 'rb')
+ FvFileObj.seek(0)
+ # The PI FV header is 0x48 bytes
+ FvHeaderBuffer = FvFileObj.read(0x48)
+ # Extract log2(FV alignment) from the FV header attributes
+ if isinstance(FvHeaderBuffer[0x2E], str):
+ FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
+ else:
+ FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
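+ # Note: FvHeaderBuffer[0x2E] is an int under Python 3 and a one-character str under
+ # Python 2; both branches above recover the same log2(alignment) value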
+ FvFileObj.close()
+ if FvAlignmentValue > MaxFvAlignment:
+ MaxFvAlignment = FvAlignmentValue
+
+ OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get("FV_IMAGE"))
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [FvFileName], 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE', IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ # MaxFvAlignment is larger than or equal to 1K
+ if MaxFvAlignment >= 0x400:
+ if MaxFvAlignment >= 0x100000:
+ # The max alignment supported by FFS is 16M.
+ if MaxFvAlignment >= 0x1000000:
+ self.Alignment = "16M"
+ else:
+ self.Alignment = str(MaxFvAlignment // 0x100000) + "M"
+ else:
+ self.Alignment = str (MaxFvAlignment // 0x400) + "K"
+ else:
+ # MaxFvAlignment is less than 1K
+ self.Alignment = str (MaxFvAlignment)
+
+ return OutputFileList, self.Alignment
+ #
+ # Generate Fv
+ #
+ if self.FvName is not None:
+ Buffer = BytesIO()
+ Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
+ if Fv is not None:
+ self.Fv = Fv
+ if not self.FvAddr and self.Fv.BaseAddress:
+ self.FvAddr = self.Fv.BaseAddress
+ FvFileName = Fv.AddToBuffer(Buffer, self.FvAddr, MacroDict = Dict, Flag=IsMakefile)
+ if Fv.FvAlignment is not None:
+ if self.Alignment is None:
+ self.Alignment = Fv.FvAlignment
+ else:
+ if GenFdsGlobalVariable.GetAlignment (Fv.FvAlignment) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
+ self.Alignment = Fv.FvAlignment
+ else:
+ if self.FvFileName is not None:
+ FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
+ if os.path.isfile(FvFileName):
+ FvFileObj = open (FvFileName, 'rb')
+ FvFileObj.seek(0)
+ # The PI FV header is 0x48 bytes
+ FvHeaderBuffer = FvFileObj.read(0x48)
+ # Extract log2(FV alignment) from the FV header attributes
+ if isinstance(FvHeaderBuffer[0x2E], str):
+ FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
+ else:
+ FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
+ # FvAlignmentValue is larger than or equal to 1K
+ if FvAlignmentValue >= 0x400:
+ if FvAlignmentValue >= 0x100000:
+ # The max alignment supported by FFS is 16M.
+ if FvAlignmentValue >= 0x1000000:
+ self.Alignment = "16M"
+ else:
+ self.Alignment = str(FvAlignmentValue // 0x100000) + "M"
+ else:
+ self.Alignment = str (FvAlignmentValue // 0x400) + "K"
+ else:
+ # FvAlignmentValue is less than 1K
+ self.Alignment = str (FvAlignmentValue)
+ FvFileObj.close()
+ else:
+ if len (mws.getPkgPath()) == 0:
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, "%s is not found in WORKSPACE: %s" % self.FvFileName, GenFdsGlobalVariable.WorkSpaceDir)
+ else:
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, "%s is not found in packages path:\n\t%s" % (self.FvFileName, '\n\t'.join(mws.getPkgPath())))
+
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "FvImageSection Failed! %s NOT found in FDF" % self.FvName)
+
+ #
+ # Prepare the parameters for GenSection
+ #
+ OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get("FV_IMAGE"))
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [FvFileName], 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE', IsMakefile=IsMakefile)
+ OutputFileList.append(OutputFile)
+
+ return OutputFileList, self.Alignment
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFds.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFds.py
new file mode 100755
index 00000000..74dca772
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFds.py
@@ -0,0 +1,800 @@
+## @file
+# generate flash image
+#
+# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from re import compile
+from optparse import OptionParser
+from sys import exit
+from glob import glob
+from struct import unpack
+from linecache import getlines
+from io import BytesIO
+
+import Common.LongFilePathOs as os
+from Common.TargetTxtClassObject import TargetTxtDict
+from Common.DataType import *
+import Common.GlobalData as GlobalData
+from Common import EdkLogger
+from Common.StringUtils import NormPath
+from Common.Misc import DirCache, PathClass, GuidStructureStringToGuidString
+from Common.Misc import SaveFileOnChange, ClearDuplicatedInf
+from Common.BuildVersion import gBUILD_VERSION
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common.BuildToolError import FatalError, GENFDS_ERROR, CODE_ERROR, FORMAT_INVALID, RESOURCE_NOT_AVAILABLE, FILE_NOT_FOUND, OPTION_MISSING, FORMAT_NOT_SUPPORTED, OPTION_VALUE_INVALID, PARAMETER_INVALID
+from Workspace.WorkspaceDatabase import WorkspaceDatabase
+
+from .FdfParser import FdfParser, Warning
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from .FfsFileStatement import FileStatement
+import Common.DataType as DataType
+from struct import Struct
+
+## Version and Copyright
+versionNumber = "1.0" + ' ' + gBUILD_VERSION
+__version__ = "%prog Version " + versionNumber
+__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
+
+## Tool entrance method
+#
+ # This method mainly dispatches to specific methods based on the command-line options.
+ # If no error is found, a zero value is returned so the caller of this tool can tell
+ # whether it executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def main():
+ global Options
+ Options = myOptionParser()
+ EdkLogger.Initialize()
+ return GenFdsApi(OptionsToCommandDict(Options))
+
+def resetFdsGlobalVariable():
+ GenFdsGlobalVariable.FvDir = ''
+ GenFdsGlobalVariable.OutputDirDict = {}
+ GenFdsGlobalVariable.BinDir = ''
+ # will be FvDir + os.sep + 'Ffs'
+ GenFdsGlobalVariable.FfsDir = ''
+ GenFdsGlobalVariable.FdfParser = None
+ GenFdsGlobalVariable.LibDir = ''
+ GenFdsGlobalVariable.WorkSpace = None
+ GenFdsGlobalVariable.WorkSpaceDir = ''
+ GenFdsGlobalVariable.ConfDir = ''
+ GenFdsGlobalVariable.OutputDirFromDscDict = {}
+ GenFdsGlobalVariable.TargetName = ''
+ GenFdsGlobalVariable.ToolChainTag = ''
+ GenFdsGlobalVariable.RuleDict = {}
+ GenFdsGlobalVariable.ArchList = None
+ GenFdsGlobalVariable.ActivePlatform = None
+ GenFdsGlobalVariable.FvAddressFileName = ''
+ GenFdsGlobalVariable.VerboseMode = False
+ GenFdsGlobalVariable.DebugLevel = -1
+ GenFdsGlobalVariable.SharpCounter = 0
+ GenFdsGlobalVariable.SharpNumberPerLine = 40
+ GenFdsGlobalVariable.FdfFile = ''
+ GenFdsGlobalVariable.FdfFileTimeStamp = 0
+ GenFdsGlobalVariable.FixedLoadAddress = False
+ GenFdsGlobalVariable.PlatformName = ''
+
+ GenFdsGlobalVariable.BuildRuleFamily = DataType.TAB_COMPILER_MSFT
+ GenFdsGlobalVariable.ToolChainFamily = DataType.TAB_COMPILER_MSFT
+ GenFdsGlobalVariable.__BuildRuleDatabase = None
+ GenFdsGlobalVariable.GuidToolDefinition = {}
+ GenFdsGlobalVariable.FfsCmdDict = {}
+ GenFdsGlobalVariable.SecCmdList = []
+ GenFdsGlobalVariable.CopyList = []
+ GenFdsGlobalVariable.ModuleFile = ''
+ GenFdsGlobalVariable.EnableGenfdsMultiThread = True
+
+ GenFdsGlobalVariable.LargeFileInFvFlags = []
+ GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID = '5473C07A-3DCB-4dca-BD6F-1E9689E7349A'
+ GenFdsGlobalVariable.LARGE_FILE_SIZE = 0x1000000
+
+ GenFdsGlobalVariable.SectionHeader = Struct("3B 1B")
+
+ # FvName, FdName, CapName in FDF, Image file name
+ GenFdsGlobalVariable.ImageBinDict = {}
+
+def GenFdsApi(FdsCommandDict, WorkSpaceDataBase=None):
+ global Workspace
+ Workspace = ""
+ ArchList = None
+ ReturnCode = 0
+ resetFdsGlobalVariable()
+
+ try:
+ if FdsCommandDict.get("verbose"):
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ GenFdsGlobalVariable.VerboseMode = True
+
+ if FdsCommandDict.get("FixedAddress"):
+ GenFdsGlobalVariable.FixedLoadAddress = True
+
+ if FdsCommandDict.get("quiet"):
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ if FdsCommandDict.get("debug"):
+ EdkLogger.SetLevel(FdsCommandDict.get("debug") + 1)
+ GenFdsGlobalVariable.DebugLevel = FdsCommandDict.get("debug")
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ if not FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE')):
+ EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined",
+ ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
+ elif not os.path.exists(FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE'))):
+ EdkLogger.error("GenFds", PARAMETER_INVALID, "WORKSPACE is invalid",
+ ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
+ else:
+ Workspace = os.path.normcase(FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE')))
+ GenFdsGlobalVariable.WorkSpaceDir = Workspace
+ if FdsCommandDict.get("debug"):
+ GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)
+ if FdsCommandDict.get("GenfdsMultiThread"):
+ GenFdsGlobalVariable.EnableGenfdsMultiThread = True
+ else:
+ GenFdsGlobalVariable.EnableGenfdsMultiThread = False
+ os.chdir(GenFdsGlobalVariable.WorkSpaceDir)
+
+ # set multiple workspace
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(GenFdsGlobalVariable.WorkSpaceDir, PackagesPath)
+
+ if FdsCommandDict.get("fdf_file"):
+ FdfFilename = FdsCommandDict.get("fdf_file")[0].Path
+ FdfFilename = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdfFilename)
+
+ if FdfFilename[0:2] == '..':
+ FdfFilename = os.path.realpath(FdfFilename)
+ if not os.path.isabs(FdfFilename):
+ FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename)
+ if not os.path.exists(FdfFilename):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename)
+
+ GenFdsGlobalVariable.FdfFile = FdfFilename
+ GenFdsGlobalVariable.FdfFileTimeStamp = os.path.getmtime(FdfFilename)
+ else:
+ EdkLogger.error("GenFds", OPTION_MISSING, "Missing FDF filename")
+
+ if FdsCommandDict.get("build_target"):
+ GenFdsGlobalVariable.TargetName = FdsCommandDict.get("build_target")
+
+ if FdsCommandDict.get("toolchain_tag"):
+ GenFdsGlobalVariable.ToolChainTag = FdsCommandDict.get("toolchain_tag")
+
+ if FdsCommandDict.get("active_platform"):
+ ActivePlatform = FdsCommandDict.get("active_platform")
+ ActivePlatform = GenFdsGlobalVariable.ReplaceWorkspaceMacro(ActivePlatform)
+
+ if ActivePlatform[0:2] == '..':
+ ActivePlatform = os.path.realpath(ActivePlatform)
+
+ if not os.path.isabs (ActivePlatform):
+ ActivePlatform = mws.join(GenFdsGlobalVariable.WorkSpaceDir, ActivePlatform)
+
+ if not os.path.exists(ActivePlatform):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, "ActivePlatform doesn't exist!")
+ else:
+ EdkLogger.error("GenFds", OPTION_MISSING, "Missing active platform")
+
+ GenFdsGlobalVariable.ActivePlatform = PathClass(NormPath(ActivePlatform))
+
+ if FdsCommandDict.get("conf_directory"):
+ # Get alternate Conf location, if it is absolute, then just use the absolute directory name
+ ConfDirectoryPath = os.path.normpath(FdsCommandDict.get("conf_directory"))
+ if ConfDirectoryPath.startswith('"'):
+ ConfDirectoryPath = ConfDirectoryPath[1:]
+ if ConfDirectoryPath.endswith('"'):
+ ConfDirectoryPath = ConfDirectoryPath[:-1]
+ if not os.path.isabs(ConfDirectoryPath):
+ # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
+ # This also handles someone specifying the Conf directory in the workspace. Using --conf=Conf
+ ConfDirectoryPath = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ConfDirectoryPath)
+ else:
+ if "CONF_PATH" in os.environ:
+ ConfDirectoryPath = os.path.normcase(os.environ["CONF_PATH"])
+ else:
+ # Get standard WORKSPACE/Conf, use the absolute path to the WORKSPACE/Conf
+ ConfDirectoryPath = mws.join(GenFdsGlobalVariable.WorkSpaceDir, 'Conf')
+ GenFdsGlobalVariable.ConfDir = ConfDirectoryPath
+ if not GlobalData.gConfDirectory:
+ GlobalData.gConfDirectory = GenFdsGlobalVariable.ConfDir
+ BuildConfigurationFile = os.path.normpath(os.path.join(ConfDirectoryPath, "target.txt"))
+ if os.path.isfile(BuildConfigurationFile) == True:
+ # if no build target given in command line, get it from target.txt
+ TargetObj = TargetTxtDict()
+ TargetTxt = TargetObj.Target
+ if not GenFdsGlobalVariable.TargetName:
+ BuildTargetList = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET]
+ if len(BuildTargetList) != 1:
+ EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for Target.")
+ GenFdsGlobalVariable.TargetName = BuildTargetList[0]
+
+ # if no tool chain given in command line, get it from target.txt
+ if not GenFdsGlobalVariable.ToolChainTag:
+ ToolChainList = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
+ if ToolChainList is None or len(ToolChainList) == 0:
+ EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.")
+ if len(ToolChainList) != 1:
+ EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for ToolChain.")
+ GenFdsGlobalVariable.ToolChainTag = ToolChainList[0]
+ else:
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=BuildConfigurationFile)
+
+ #Set global flag for build mode
+ GlobalData.gIgnoreSource = FdsCommandDict.get("IgnoreSources")
+
+ if FdsCommandDict.get("macro"):
+ for Pair in FdsCommandDict.get("macro"):
+ if Pair.startswith('"'):
+ Pair = Pair[1:]
+ if Pair.endswith('"'):
+ Pair = Pair[:-1]
+ List = Pair.split('=')
+ if len(List) == 2:
+ if not List[1].strip():
+ EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="No Value given for Macro %s" %List[0])
+ if List[0].strip() in ["WORKSPACE", "TARGET", "TOOLCHAIN"]:
+ GlobalData.gGlobalDefines[List[0].strip()] = List[1].strip()
+ else:
+ GlobalData.gCommandLineDefines[List[0].strip()] = List[1].strip()
+ else:
+ GlobalData.gCommandLineDefines[List[0].strip()] = "TRUE"
+ os.environ["WORKSPACE"] = Workspace
+
+        # Use the -t and -b options as gGlobalDefines' TOOLCHAIN and TARGET if they are not defined
+ if "TARGET" not in GlobalData.gGlobalDefines:
+ GlobalData.gGlobalDefines["TARGET"] = GenFdsGlobalVariable.TargetName
+ if "TOOLCHAIN" not in GlobalData.gGlobalDefines:
+ GlobalData.gGlobalDefines["TOOLCHAIN"] = GenFdsGlobalVariable.ToolChainTag
+ if "TOOL_CHAIN_TAG" not in GlobalData.gGlobalDefines:
+ GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = GenFdsGlobalVariable.ToolChainTag
+
+ """call Workspace build create database"""
+ GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
+
+ if WorkSpaceDataBase:
+ BuildWorkSpace = WorkSpaceDataBase
+ else:
+ BuildWorkSpace = WorkspaceDatabase()
+ #
+ # Get files real name in workspace dir
+ #
+ GlobalData.gAllFiles = DirCache(Workspace)
+ GlobalData.gWorkspace = Workspace
+
+ if FdsCommandDict.get("build_architecture_list"):
+ ArchList = FdsCommandDict.get("build_architecture_list").split(',')
+ else:
+ ArchList = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON, FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].SupArchList
+
+ TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON, FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].SupArchList) & set(ArchList)
+ if len(TargetArchList) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Target ARCH %s not in platform supported ARCH %s" % (str(ArchList), str(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON].SupArchList)))
+
+ for Arch in ArchList:
+ GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = NormPath(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].OutputDirectory)
+
+ # assign platform name based on last entry in ArchList
+ GenFdsGlobalVariable.PlatformName = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, ArchList[-1], FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].PlatformName
+
+ if FdsCommandDict.get("platform_build_directory"):
+ OutputDirFromCommandLine = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdsCommandDict.get("platform_build_directory"))
+ if not os.path.isabs (OutputDirFromCommandLine):
+ OutputDirFromCommandLine = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, OutputDirFromCommandLine)
+ for Arch in ArchList:
+ GenFdsGlobalVariable.OutputDirDict[Arch] = OutputDirFromCommandLine
+ else:
+ for Arch in ArchList:
+ GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.join(GenFdsGlobalVariable.OutputDirFromDscDict[Arch], GenFdsGlobalVariable.TargetName + '_' + GenFdsGlobalVariable.ToolChainTag)
+
+ for Key in GenFdsGlobalVariable.OutputDirDict:
+ OutputDir = GenFdsGlobalVariable.OutputDirDict[Key]
+ if OutputDir[0:2] == '..':
+ OutputDir = os.path.realpath(OutputDir)
+
+ if OutputDir[1] != ':':
+ OutputDir = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, OutputDir)
+
+ if not os.path.exists(OutputDir):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=OutputDir)
+ GenFdsGlobalVariable.OutputDirDict[Key] = OutputDir
+
+ """ Parse Fdf file, has to place after build Workspace as FDF may contain macros from DSC file """
+ if WorkSpaceDataBase:
+ FdfParserObj = GlobalData.gFdfParser
+ else:
+ FdfParserObj = FdfParser(FdfFilename)
+ FdfParserObj.ParseFile()
+
+ if FdfParserObj.CycleReferenceCheck():
+ EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Cycle Reference Detected in FDF file")
+
+ if FdsCommandDict.get("fd"):
+ if FdsCommandDict.get("fd")[0].upper() in FdfParserObj.Profile.FdDict:
+ GenFds.OnlyGenerateThisFd = FdsCommandDict.get("fd")[0]
+ else:
+ EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
+ "No such an FD in FDF file: %s" % FdsCommandDict.get("fd")[0])
+
+ if FdsCommandDict.get("fv"):
+ if FdsCommandDict.get("fv")[0].upper() in FdfParserObj.Profile.FvDict:
+ GenFds.OnlyGenerateThisFv = FdsCommandDict.get("fv")[0]
+ else:
+ EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
+ "No such an FV in FDF file: %s" % FdsCommandDict.get("fv")[0])
+
+ if FdsCommandDict.get("cap"):
+ if FdsCommandDict.get("cap")[0].upper() in FdfParserObj.Profile.CapsuleDict:
+ GenFds.OnlyGenerateThisCap = FdsCommandDict.get("cap")[0]
+ else:
+ EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
+ "No such a Capsule in FDF file: %s" % FdsCommandDict.get("cap")[0])
+
+ GenFdsGlobalVariable.WorkSpace = BuildWorkSpace
+ if ArchList:
+ GenFdsGlobalVariable.ArchList = ArchList
+
+ # Dsc Build Data will handle Pcd Settings from CommandLine.
+
+ """Modify images from build output if the feature of loading driver at fixed address is on."""
+ if GenFdsGlobalVariable.FixedLoadAddress:
+ GenFds.PreprocessImage(BuildWorkSpace, GenFdsGlobalVariable.ActivePlatform)
+
+        # Record the FV region info that may be specified in the FD
+ if FdfParserObj.Profile.FvDict and FdfParserObj.Profile.FdDict:
+ for FvObj in FdfParserObj.Profile.FvDict.values():
+ for FdObj in FdfParserObj.Profile.FdDict.values():
+ for RegionObj in FdObj.RegionList:
+ if RegionObj.RegionType != BINARY_FILE_TYPE_FV:
+ continue
+ for RegionData in RegionObj.RegionDataList:
+ if FvObj.UiFvName.upper() == RegionData.upper():
+ if not FvObj.BaseAddress:
+ FvObj.BaseAddress = '0x%x' % (int(FdObj.BaseAddress, 0) + RegionObj.Offset)
+ if FvObj.FvRegionInFD:
+ if FvObj.FvRegionInFD != RegionObj.Size:
+ EdkLogger.error("GenFds", FORMAT_INVALID, "The FV %s's region is specified in multiple FD with different value." %FvObj.UiFvName)
+ else:
+ FvObj.FvRegionInFD = RegionObj.Size
+ RegionObj.BlockInfoOfRegion(FdObj.BlockSizeList, FvObj)
+
+ """Call GenFds"""
+ GenFds.GenFd('', FdfParserObj, BuildWorkSpace, ArchList)
+
+ """Generate GUID cross reference file"""
+ GenFds.GenerateGuidXRefFile(BuildWorkSpace, ArchList, FdfParserObj)
+
+ """Display FV space info."""
+ GenFds.DisplayFvSpaceInfo(FdfParserObj)
+
+ except Warning as X:
+ EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
+ ReturnCode = FORMAT_INVALID
+ except FatalError as X:
+ if FdsCommandDict.get("debug") is not None:
+ import traceback
+ EdkLogger.quiet(traceback.format_exc())
+ ReturnCode = X.args[0]
+ except:
+ import traceback
+ EdkLogger.error(
+ "\nPython",
+ CODE_ERROR,
+ "Tools code failure",
+ ExtraData="Please send email to %s for help, attaching following call stack trace!\n" % MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ EdkLogger.quiet(traceback.format_exc())
+ ReturnCode = CODE_ERROR
+ finally:
+ ClearDuplicatedInf()
+ return ReturnCode
+
+def OptionsToCommandDict(Options):
+ FdsCommandDict = {}
+ FdsCommandDict["verbose"] = Options.verbose
+ FdsCommandDict["FixedAddress"] = Options.FixedAddress
+ FdsCommandDict["quiet"] = Options.quiet
+ FdsCommandDict["debug"] = Options.debug
+ FdsCommandDict["Workspace"] = Options.Workspace
+ FdsCommandDict["GenfdsMultiThread"] = not Options.NoGenfdsMultiThread
+ FdsCommandDict["fdf_file"] = [PathClass(Options.filename)] if Options.filename else []
+ FdsCommandDict["build_target"] = Options.BuildTarget
+ FdsCommandDict["toolchain_tag"] = Options.ToolChain
+ FdsCommandDict["active_platform"] = Options.activePlatform
+ FdsCommandDict["OptionPcd"] = Options.OptionPcd
+ FdsCommandDict["conf_directory"] = Options.ConfDirectory
+ FdsCommandDict["IgnoreSources"] = Options.IgnoreSources
+ FdsCommandDict["macro"] = Options.Macros
+ FdsCommandDict["build_architecture_list"] = Options.archList
+ FdsCommandDict["platform_build_directory"] = Options.outputDir
+ FdsCommandDict["fd"] = [Options.uiFdName] if Options.uiFdName else []
+ FdsCommandDict["fv"] = [Options.uiFvName] if Options.uiFvName else []
+ FdsCommandDict["cap"] = [Options.uiCapName] if Options.uiCapName else []
+ return FdsCommandDict
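+
+# Hedged usage sketch: roughly how a caller would drive GenFds from parsed
+# command-line options (both names are defined in this file; myOptionParser
+# appears just below):
+#   Options = myOptionParser()
+#   ReturnCode = GenFdsApi(OptionsToCommandDict(Options))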
+
+
+gParamCheck = []
+def SingleCheckCallback(option, opt_str, value, parser):
+ if option not in gParamCheck:
+ setattr(parser.values, option.dest, value)
+ gParamCheck.append(option)
+ else:
+ parser.error("Option %s only allows one instance in command line!" % option)
+
+## Parse command line options
+#
+# Using standard Python module optparse to parse command line option of this tool.
+#
+# @retval Opt An optparse.Values object containing the parsed options
+#
+def myOptionParser():
+ usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\""
+ Parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))
+ Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback)
+ Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH")
+ Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
+ Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-p", "--platform", type="string", dest="activePlatform", help="Set the ACTIVE_PLATFORM, overrides target.txt ACTIVE_PLATFORM setting.",
+ action="callback", callback=SingleCheckCallback)
+ Parser.add_option("-w", "--workspace", type="string", dest="Workspace", default=os.environ.get('WORKSPACE'), help="Set the WORKSPACE",
+ action="callback", callback=SingleCheckCallback)
+ Parser.add_option("-o", "--outputDir", type="string", dest="outputDir", help="Name of Build Output directory",
+ action="callback", callback=SingleCheckCallback)
+ Parser.add_option("-r", "--rom_image", dest="uiFdName", help="Build the image using the [FD] section named by FdUiName.")
+ Parser.add_option("-i", "--FvImage", dest="uiFvName", help="Build the FV image using the [FV] section named by UiFvName")
+ Parser.add_option("-C", "--CapsuleImage", dest="uiCapName", help="Build the Capsule image using the [Capsule] section named by UiCapName")
+ Parser.add_option("-b", "--buildtarget", type="string", dest="BuildTarget", help="Set the build TARGET, overrides target.txt TARGET setting.",
+ action="callback", callback=SingleCheckCallback)
+ Parser.add_option("-t", "--tagname", type="string", dest="ToolChain", help="Using the tools: TOOL_CHAIN_TAG name to build the platform.",
+ action="callback", callback=SingleCheckCallback)
+ Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
+ Parser.add_option("-s", "--specifyaddress", dest="FixedAddress", action="store_true", type=None, help="Specify driver load address.")
+ Parser.add_option("--conf", action="store", type="string", dest="ConfDirectory", help="Specify the customized Conf directory.")
+ Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
+ Parser.add_option("--pcd", action="append", dest="OptionPcd", help="Set PCD value by command line. Format: \"PcdName=Value\" ")
+ Parser.add_option("--genfds-multi-thread", action="store_true", dest="GenfdsMultiThread", default=True, help="Enable GenFds multi thread to generate ffs file.")
+ Parser.add_option("--no-genfds-multi-thread", action="store_true", dest="NoGenfdsMultiThread", default=False, help="Disable GenFds multi thread to generate ffs file.")
+
+ Options, _ = Parser.parse_args()
+ return Options
+
+## The class implementing the EDK2 flash image generation process
+#
+# This process includes:
+# 1. Collect workspace information, including platform and module information
+# 2. Call methods of Fd class to generate FD
+# 3. Call methods of Fv class to generate FVs that do not belong to any FD
+#
+class GenFds(object):
+    FdfParser = None
+ OnlyGenerateThisFd = None
+ OnlyGenerateThisFv = None
+ OnlyGenerateThisCap = None
+
+ ## GenFd()
+ #
+ # @param OutputDir Output directory
+ # @param FdfParserObject FDF contents parser
+ # @param Workspace The directory of workspace
+ # @param ArchList The Arch list of platform
+ #
+ @staticmethod
+ def GenFd (OutputDir, FdfParserObject, WorkSpace, ArchList):
+ GenFdsGlobalVariable.SetDir ('', FdfParserObject, WorkSpace, ArchList)
+
+ GenFdsGlobalVariable.VerboseLogger(" Generate all Fd images and their required FV and Capsule images!")
+ if GenFds.OnlyGenerateThisCap is not None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict:
+ CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[GenFds.OnlyGenerateThisCap.upper()]
+ if CapsuleObj is not None:
+ CapsuleObj.GenCapsule()
+ return
+
+ if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
+ FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
+ if FdObj is not None:
+ FdObj.GenFd()
+ return
+ elif GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisFv is None:
+ for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
+ FdObj.GenFd()
+
+ GenFdsGlobalVariable.VerboseLogger("\n Generate other FV images! ")
+ if GenFds.OnlyGenerateThisFv is not None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
+ FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[GenFds.OnlyGenerateThisFv.upper()]
+ if FvObj is not None:
+ Buffer = BytesIO()
+ FvObj.AddToBuffer(Buffer)
+ Buffer.close()
+ return
+ elif GenFds.OnlyGenerateThisFv is None:
+ for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
+ Buffer = BytesIO()
+ FvObj.AddToBuffer(Buffer)
+ Buffer.close()
+
+ if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
+ if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
+ GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
+ for CapsuleObj in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.values():
+ CapsuleObj.GenCapsule()
+
+ if GenFdsGlobalVariable.FdfParser.Profile.OptRomDict != {}:
+ GenFdsGlobalVariable.VerboseLogger("\n Generate all Option ROM!")
+ for OptRomObj in GenFdsGlobalVariable.FdfParser.Profile.OptRomDict.values():
+ OptRomObj.AddToBuffer(None)
+
+ @staticmethod
+ def GenFfsMakefile(OutputDir, FdfParserObject, WorkSpace, ArchList, GlobalData):
+ GenFdsGlobalVariable.SetEnv(FdfParserObject, WorkSpace, ArchList, GlobalData)
+ for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
+ FdObj.GenFd(Flag=True)
+
+ for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
+ FvObj.AddToBuffer(Buffer=None, Flag=True)
+
+ if GenFdsGlobalVariable.FdfParser.Profile.OptRomDict != {}:
+ for OptRomObj in GenFdsGlobalVariable.FdfParser.Profile.OptRomDict.values():
+ OptRomObj.AddToBuffer(Buffer=None, Flag=True)
+
+ return GenFdsGlobalVariable.FfsCmdDict
+
+ ## GetFvBlockSize()
+ #
+    # @param FvObj The FV object whose block size to get
+ # @retval int Block size value
+ #
+ @staticmethod
+ def GetFvBlockSize(FvObj):
+ DefaultBlockSize = 0x1
+ FdObj = None
+ if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
+ FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
+ if FdObj is None:
+ for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
+ for ElementRegion in ElementFd.RegionList:
+ if ElementRegion.RegionType == BINARY_FILE_TYPE_FV:
+ for ElementRegionData in ElementRegion.RegionDataList:
+ if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
+ if FvObj.BlockSizeList != []:
+ return FvObj.BlockSizeList[0][0]
+ else:
+ return ElementRegion.BlockSizeOfRegion(ElementFd.BlockSizeList)
+ if FvObj.BlockSizeList != []:
+ return FvObj.BlockSizeList[0][0]
+ return DefaultBlockSize
+ else:
+ for ElementRegion in FdObj.RegionList:
+ if ElementRegion.RegionType == BINARY_FILE_TYPE_FV:
+ for ElementRegionData in ElementRegion.RegionDataList:
+ if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
+ if FvObj.BlockSizeList != []:
+ return FvObj.BlockSizeList[0][0]
+ else:
+                                return ElementRegion.BlockSizeOfRegion(FdObj.BlockSizeList)  # ElementFd is undefined in this branch; use FdObj
+ return DefaultBlockSize
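+
+    # Descriptive note: block-size resolution order is (1) the FV's own
+    # BlockSizeList, (2) the block size of the FD region that references this
+    # FV, (3) DefaultBlockSize (0x1).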
+
+ ## DisplayFvSpaceInfo()
+ #
+    # @param FdfParserObject FDF contents parser
+ # @retval None
+ #
+ @staticmethod
+ def DisplayFvSpaceInfo(FdfParserObject):
+
+ FvSpaceInfoList = []
+ MaxFvNameLength = 0
+ for FvName in FdfParserObject.Profile.FvDict:
+ if len(FvName) > MaxFvNameLength:
+ MaxFvNameLength = len(FvName)
+ FvSpaceInfoFileName = os.path.join(GenFdsGlobalVariable.FvDir, FvName.upper() + '.Fv.map')
+ if os.path.exists(FvSpaceInfoFileName):
+ FileLinesList = getlines(FvSpaceInfoFileName)
+ TotalFound = False
+ Total = ''
+ UsedFound = False
+ Used = ''
+ FreeFound = False
+ Free = ''
+ for Line in FileLinesList:
+ NameValue = Line.split('=')
+ if len(NameValue) == 2:
+ if NameValue[0].strip() == 'EFI_FV_TOTAL_SIZE':
+ TotalFound = True
+ Total = NameValue[1].strip()
+ if NameValue[0].strip() == 'EFI_FV_TAKEN_SIZE':
+ UsedFound = True
+ Used = NameValue[1].strip()
+ if NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
+ FreeFound = True
+ Free = NameValue[1].strip()
+
+ if TotalFound and UsedFound and FreeFound:
+ FvSpaceInfoList.append((FvName, Total, Used, Free))
+
+ GenFdsGlobalVariable.QuietLogger('\nFV Space Information') # VBox: We want this info in -quiet builds too.
+ for FvSpaceInfo in FvSpaceInfoList:
+ Name = FvSpaceInfo[0]
+ TotalSizeValue = int(FvSpaceInfo[1], 0)
+ UsedSizeValue = int(FvSpaceInfo[2], 0)
+ FreeSizeValue = int(FvSpaceInfo[3], 0)
+ if UsedSizeValue == TotalSizeValue:
+ Percentage = '100'
+ else:
+ Percentage = str((UsedSizeValue + 0.0) / TotalSizeValue)[0:4].lstrip('0.')
+
+ GenFdsGlobalVariable.QuietLogger(Name + ' ' + '[' + Percentage + '%Full] '\
+ + str(TotalSizeValue) + ' (' + hex(TotalSizeValue) + ')' + ' total, '\
+ + str(UsedSizeValue) + ' (' + hex(UsedSizeValue) + ')' + ' used, '\
+ + str(FreeSizeValue) + ' (' + hex(FreeSizeValue) + ')' + ' free') # VBox: Same as above
+
+ ## PreprocessImage()
+ #
+ # @param BuildDb Database from build meta data files
+ # @param DscFile modules from dsc file will be preprocessed
+ # @retval None
+ #
+ @staticmethod
+ def PreprocessImage(BuildDb, DscFile):
+ PcdDict = BuildDb.BuildObject[DscFile, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].Pcds
+ PcdValue = ''
+ for Key in PcdDict:
+ PcdObj = PcdDict[Key]
+ if PcdObj.TokenCName == 'PcdBsBaseAddress':
+ PcdValue = PcdObj.DefaultValue
+ break
+
+ if PcdValue == '':
+ return
+
+ Int64PcdValue = int(PcdValue, 0)
+ if Int64PcdValue == 0 or Int64PcdValue < -1:
+ return
+
+ TopAddress = 0
+ if Int64PcdValue > 0:
+ TopAddress = Int64PcdValue
+
+ ModuleDict = BuildDb.BuildObject[DscFile, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].Modules
+ for Key in ModuleDict:
+ ModuleObj = BuildDb.BuildObject[Key, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ print(ModuleObj.BaseName + ' ' + ModuleObj.ModuleType)
+
+ @staticmethod
+ def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):
+ GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
+ GuidXRefFile = []
+ PkgGuidDict = {}
+ GuidDict = {}
+ ModuleList = []
+ FileGuidList = []
+ VariableGuidSet = set()
+ for Arch in ArchList:
+ PlatformDataBase = BuildDb.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ PkgList = GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag)
+ for P in PkgList:
+ PkgGuidDict.update(P.Guids)
+ for Name, Guid in PlatformDataBase.Pcds:
+ Pcd = PlatformDataBase.Pcds[Name, Guid]
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_HII, TAB_PCDS_DYNAMIC_EX_HII]:
+ for SkuId in Pcd.SkuInfoList:
+ Sku = Pcd.SkuInfoList[SkuId]
+ if Sku.VariableGuid in VariableGuidSet:continue
+ VariableGuidSet.add(Sku.VariableGuid)
+ if Sku.VariableGuid and Sku.VariableGuid in PkgGuidDict.keys():
+ GuidDict[Sku.VariableGuid] = PkgGuidDict[Sku.VariableGuid]
+ for ModuleFile in PlatformDataBase.Modules:
+ Module = BuildDb.BuildObject[ModuleFile, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ if Module in ModuleList:
+ continue
+ else:
+ ModuleList.append(Module)
+ if GlobalData.gGuidPattern.match(ModuleFile.BaseName):
+ GuidXRefFile.append("%s %s\n" % (ModuleFile.BaseName, Module.BaseName))
+ else:
+ GuidXRefFile.append("%s %s\n" % (Module.Guid, Module.BaseName))
+ GuidDict.update(Module.Protocols)
+ GuidDict.update(Module.Guids)
+ GuidDict.update(Module.Ppis)
+ for FvName in FdfParserObj.Profile.FvDict:
+ for FfsObj in FdfParserObj.Profile.FvDict[FvName].FfsList:
+ if not isinstance(FfsObj, FileStatement):
+ InfPath = PathClass(NormPath(mws.join(GenFdsGlobalVariable.WorkSpaceDir, FfsObj.InfFileName)))
+ FdfModule = BuildDb.BuildObject[InfPath, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ if FdfModule in ModuleList:
+ continue
+ else:
+ ModuleList.append(FdfModule)
+ GuidXRefFile.append("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName))
+ GuidDict.update(FdfModule.Protocols)
+ GuidDict.update(FdfModule.Guids)
+ GuidDict.update(FdfModule.Ppis)
+ else:
+ FileStatementGuid = FfsObj.NameGuid
+ if FileStatementGuid in FileGuidList:
+ continue
+ else:
+ FileGuidList.append(FileStatementGuid)
+ Name = []
+ FfsPath = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
+ FfsPath = glob(os.path.join(FfsPath, FileStatementGuid) + TAB_STAR)
+ if not FfsPath:
+ continue
+ if not os.path.exists(FfsPath[0]):
+ continue
+ MatchDict = {}
+                        ReFileEnds = compile(r'\S+(\.ui)$|\S+(fv\.sec\.txt)$|\S+(\.pe32\.txt)$|\S+(\.te\.txt)$|\S+(\.pic\.txt)$|\S+(\.raw\.txt)$|\S+(\.ffs\.txt)$')
+ FileList = os.listdir(FfsPath[0])
+ for File in FileList:
+ Match = ReFileEnds.search(File)
+ if Match:
+ for Index in range(1, 8):
+ if Match.group(Index) and Match.group(Index) in MatchDict:
+ MatchDict[Match.group(Index)].append(File)
+ elif Match.group(Index):
+ MatchDict[Match.group(Index)] = [File]
+ if not MatchDict:
+ continue
+ if '.ui' in MatchDict:
+ for File in MatchDict['.ui']:
+ with open(os.path.join(FfsPath[0], File), 'rb') as F:
+ F.read()
+ length = F.tell()
+ F.seek(4)
+ TmpStr = unpack('%dh' % ((length - 4) // 2), F.read())
+ Name = ''.join(chr(c) for c in TmpStr[:-1])
+ else:
+ FileList = []
+ if 'fv.sec.txt' in MatchDict:
+ FileList = MatchDict['fv.sec.txt']
+ elif '.pe32.txt' in MatchDict:
+ FileList = MatchDict['.pe32.txt']
+ elif '.te.txt' in MatchDict:
+ FileList = MatchDict['.te.txt']
+ elif '.pic.txt' in MatchDict:
+ FileList = MatchDict['.pic.txt']
+ elif '.raw.txt' in MatchDict:
+ FileList = MatchDict['.raw.txt']
+ elif '.ffs.txt' in MatchDict:
+ FileList = MatchDict['.ffs.txt']
+ else:
+ pass
+ for File in FileList:
+ with open(os.path.join(FfsPath[0], File), 'r') as F:
+ Name.append((F.read().split()[-1]))
+ if not Name:
+ continue
+
+                        Name = ' '.join(Name) if isinstance(Name, list) else Name
+ GuidXRefFile.append("%s %s\n" %(FileStatementGuid, Name))
+
+ # Append GUIDs, Protocols, and PPIs to the Xref file
+ GuidXRefFile.append("\n")
+ for key, item in GuidDict.items():
+ GuidXRefFile.append("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key))
+
+ if GuidXRefFile:
+ GuidXRefFile = ''.join(GuidXRefFile)
+ SaveFileOnChange(GuidXRefFileName, GuidXRefFile, False)
+ GenFdsGlobalVariable.QuietLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName) # VBox: We want this info in -quiet builds too.
+ elif os.path.exists(GuidXRefFileName):
+ os.remove(GuidXRefFileName)
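+
+    # Illustrative Guid.xref layout (the GUID and name below are hypothetical):
+    #   11111111-2222-3333-4444-555555555555 SomeModuleBaseName
+    # one "<GUID> <name>" pair per module/file, then a blank line, then the
+    # GUID/Protocol/PPI entries collected in GuidDict.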
+
+
+if __name__ == '__main__':
+ r = main()
+ ## 0-127 is a safe return range, and 1 is a standard default error
+ if r < 0 or r > 127:
+ r = 1
+ exit(r)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
new file mode 100755
index 00000000..7454359c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
@@ -0,0 +1,1033 @@
+## @file
+# Global variables for GenFds
+#
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+
+import Common.LongFilePathOs as os
+import sys
+from sys import stdout
+from subprocess import PIPE,Popen
+from struct import Struct
+from array import array
+
+from Common.BuildToolError import COMMAND_FAILURE,GENFDS_ERROR
+from Common import EdkLogger
+from Common.Misc import SaveFileOnChange
+
+from Common.TargetTxtClassObject import TargetTxtDict
+from Common.ToolDefClassObject import ToolDefDict
+from AutoGen.BuildEngine import ToolBuildRule
+import Common.DataType as DataType
+from Common.Misc import PathClass,CreateDirectory
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+import Common.GlobalData as GlobalData
+from Common.BuildToolError import *
+from AutoGen.AutoGen import CalculatePriorityValue
+
+## Global variables
+#
+#
+class GenFdsGlobalVariable:
+ FvDir = ''
+ OutputDirDict = {}
+ BinDir = ''
+ # will be FvDir + os.sep + 'Ffs'
+ FfsDir = ''
+ FdfParser = None
+ LibDir = ''
+ WorkSpace = None
+ WorkSpaceDir = ''
+ ConfDir = ''
+ OutputDirFromDscDict = {}
+ TargetName = ''
+ ToolChainTag = ''
+ RuleDict = {}
+ ArchList = None
+ ActivePlatform = None
+ FvAddressFileName = ''
+ VerboseMode = False
+ DebugLevel = -1
+ SharpCounter = 0
+ SharpNumberPerLine = 40
+ FdfFile = ''
+ FdfFileTimeStamp = 0
+ FixedLoadAddress = False
+ PlatformName = ''
+
+ BuildRuleFamily = DataType.TAB_COMPILER_MSFT
+ ToolChainFamily = DataType.TAB_COMPILER_MSFT
+ __BuildRuleDatabase = None
+ GuidToolDefinition = {}
+ FfsCmdDict = {}
+ SecCmdList = []
+ CopyList = []
+ ModuleFile = ''
+ EnableGenfdsMultiThread = True
+
+    #
+    # A list whose elements are flags indicating whether large FFS or SECTION files exist in the FV.
+    # At the beginning of each FV generation a False flag is appended to the list;
+    # after the call to GenerateSection returns, the size of the output file is checked,
+    # and if it is greater than 0xFFFFFF the tail flag in the list is set to True
+    # and EFI_FIRMWARE_FILE_SYSTEM3_GUID is passed to the C GenFv tool.
+    # At the end of the FV generation the flag is popped.
+    # The list is used as a stack to handle nested FV generation.
+    #
+ LargeFileInFvFlags = []
+ EFI_FIRMWARE_FILE_SYSTEM3_GUID = '5473C07A-3DCB-4dca-BD6F-1E9689E7349A'
+ LARGE_FILE_SIZE = 0x1000000
+
+ SectionHeader = Struct("3B 1B")
+
+    # Maps FvName/FdName/CapName from the FDF to the generated image file name
+ ImageBinDict = {}
+
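+    # Hedged sketch of the LargeFileInFvFlags stack discipline documented above
+    # (illustrative only; the real push/check/pop lives in the FV generation code):
+    #   GenFdsGlobalVariable.LargeFileInFvFlags.append(False)    # enter an FV
+    #   ... generate sections; GenerateSection() may set [-1] to True ...
+    #   if GenFdsGlobalVariable.LargeFileInFvFlags.pop():        # leave the FV
+    #       pass EFI_FIRMWARE_FILE_SYSTEM3_GUID to GenFv
+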
+ ## LoadBuildRule
+ #
+ @staticmethod
+ def _LoadBuildRule():
+ if GenFdsGlobalVariable.__BuildRuleDatabase:
+ return GenFdsGlobalVariable.__BuildRuleDatabase
+ BuildRule = ToolBuildRule()
+ GenFdsGlobalVariable.__BuildRuleDatabase = BuildRule.ToolBuildRule
+ TargetObj = TargetTxtDict()
+ ToolDefinitionFile = TargetObj.Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
+ if ToolDefinitionFile == '':
+ ToolDefinitionFile = "Conf/tools_def.txt"
+ if os.path.isfile(ToolDefinitionFile):
+ ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
+ ToolDefinition = ToolDefObj.ToolDef.ToolsDefTxtDatabase
+ if DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDefinition \
+ and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY] \
+ and ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]:
+ GenFdsGlobalVariable.BuildRuleFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]
+
+ if DataType.TAB_TOD_DEFINES_FAMILY in ToolDefinition \
+ and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY] \
+ and ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]:
+ GenFdsGlobalVariable.ToolChainFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]
+ return GenFdsGlobalVariable.__BuildRuleDatabase
+
+ ## GetBuildRules
+ # @param Inf: object of InfBuildData
+ # @param Arch: current arch
+ #
+ @staticmethod
+ def GetBuildRules(Inf, Arch):
+ if not Arch:
+ Arch = DataType.TAB_COMMON
+
+ if not Arch in GenFdsGlobalVariable.OutputDirDict:
+ return {}
+
+ BuildRuleDatabase = GenFdsGlobalVariable._LoadBuildRule()
+ if not BuildRuleDatabase:
+ return {}
+
+ PathClassObj = PathClass(Inf.MetaFile.File,
+ GenFdsGlobalVariable.WorkSpaceDir)
+ BuildDir = os.path.join(
+ GenFdsGlobalVariable.OutputDirDict[Arch],
+ Arch,
+ PathClassObj.SubDir,
+ PathClassObj.BaseName
+ )
+ BinDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch], Arch)
+ Macro = {
+ "WORKSPACE":GenFdsGlobalVariable.WorkSpaceDir,
+ "MODULE_NAME":Inf.BaseName,
+ "MODULE_GUID":Inf.Guid,
+ "MODULE_VERSION":Inf.Version,
+ "MODULE_TYPE":Inf.ModuleType,
+ "MODULE_FILE":str(PathClassObj),
+ "MODULE_FILE_BASE_NAME":PathClassObj.BaseName,
+ "MODULE_RELATIVE_DIR":PathClassObj.SubDir,
+ "MODULE_DIR":PathClassObj.SubDir,
+ "BASE_NAME":Inf.BaseName,
+ "ARCH":Arch,
+ "TOOLCHAIN":GenFdsGlobalVariable.ToolChainTag,
+ "TOOLCHAIN_TAG":GenFdsGlobalVariable.ToolChainTag,
+ "TOOL_CHAIN_TAG":GenFdsGlobalVariable.ToolChainTag,
+ "TARGET":GenFdsGlobalVariable.TargetName,
+ "BUILD_DIR":GenFdsGlobalVariable.OutputDirDict[Arch],
+ "BIN_DIR":BinDir,
+ "LIB_DIR":BinDir,
+ "MODULE_BUILD_DIR":BuildDir,
+ "OUTPUT_DIR":os.path.join(BuildDir, "OUTPUT"),
+ "DEBUG_DIR":os.path.join(BuildDir, "DEBUG")
+ }
+
+ BuildRules = {}
+ for Type in BuildRuleDatabase.FileTypeList:
+ #first try getting build rule by BuildRuleFamily
+ RuleObject = BuildRuleDatabase[Type, Inf.BuildType, Arch, GenFdsGlobalVariable.BuildRuleFamily]
+ if not RuleObject:
+                # BuildType is normally the module type, but not always; retry with ModuleType
+ if Inf.ModuleType != Inf.BuildType:
+ RuleObject = BuildRuleDatabase[Type, Inf.ModuleType, Arch, GenFdsGlobalVariable.BuildRuleFamily]
+ #second try getting build rule by ToolChainFamily
+ if not RuleObject:
+ RuleObject = BuildRuleDatabase[Type, Inf.BuildType, Arch, GenFdsGlobalVariable.ToolChainFamily]
+ if not RuleObject:
+                    # BuildType is normally the module type, but not always; retry with ModuleType
+ if Inf.ModuleType != Inf.BuildType:
+ RuleObject = BuildRuleDatabase[Type, Inf.ModuleType, Arch, GenFdsGlobalVariable.ToolChainFamily]
+ if not RuleObject:
+ continue
+ RuleObject = RuleObject.Instantiate(Macro)
+ BuildRules[Type] = RuleObject
+ for Ext in RuleObject.SourceFileExtList:
+ BuildRules[Ext] = RuleObject
+ return BuildRules
+
+ ## GetModuleCodaTargetList
+ #
+ # @param Inf: object of InfBuildData
+ # @param Arch: current arch
+ #
+ @staticmethod
+ def GetModuleCodaTargetList(Inf, Arch):
+ BuildRules = GenFdsGlobalVariable.GetBuildRules(Inf, Arch)
+ if not BuildRules:
+ return []
+
+ TargetList = set()
+ FileList = []
+
+ if not Inf.IsBinaryModule:
+ for File in Inf.Sources:
+ if File.TagName in {"", DataType.TAB_STAR, GenFdsGlobalVariable.ToolChainTag} and \
+ File.ToolChainFamily in {"", DataType.TAB_STAR, GenFdsGlobalVariable.ToolChainFamily}:
+ FileList.append((File, DataType.TAB_UNKNOWN_FILE))
+
+ for File in Inf.Binaries:
+ if File.Target in {DataType.TAB_COMMON, DataType.TAB_STAR, GenFdsGlobalVariable.TargetName}:
+ FileList.append((File, File.Type))
+
+ for File, FileType in FileList:
+ LastTarget = None
+ RuleChain = []
+ SourceList = [File]
+ Index = 0
+ while Index < len(SourceList):
+ Source = SourceList[Index]
+ Index = Index + 1
+
+ if File.IsBinary and File == Source and Inf.Binaries and File in Inf.Binaries:
+ # Skip all files that are not binary libraries
+ if not Inf.LibraryClass:
+ continue
+ RuleObject = BuildRules[DataType.TAB_DEFAULT_BINARY_FILE]
+ elif FileType in BuildRules:
+ RuleObject = BuildRules[FileType]
+ elif Source.Ext in BuildRules:
+ RuleObject = BuildRules[Source.Ext]
+ else:
+ # stop at no more rules
+ if LastTarget:
+ TargetList.add(str(LastTarget))
+ break
+
+ FileType = RuleObject.SourceFileType
+
+ # stop at STATIC_LIBRARY for library
+ if Inf.LibraryClass and FileType == DataType.TAB_STATIC_LIBRARY:
+ if LastTarget:
+ TargetList.add(str(LastTarget))
+ break
+
+ Target = RuleObject.Apply(Source)
+ if not Target:
+ if LastTarget:
+ TargetList.add(str(LastTarget))
+ break
+ elif not Target.Outputs:
+                    # A target with no further outputs is terminal; record it as a final target
+ TargetList.add(str(Target))
+
+ # to avoid cyclic rule
+ if FileType in RuleChain:
+ break
+
+ RuleChain.append(FileType)
+ SourceList.extend(Target.Outputs)
+ LastTarget = Target
+ FileType = DataType.TAB_UNKNOWN_FILE
+ for Cmd in Target.Commands:
+ if "$(CP)" == Cmd.split()[0]:
+ CpTarget = Cmd.split()[2]
+ TargetList.add(CpTarget)
+
+ return list(TargetList)
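+
+    # Hedged sketch of the walk above for a typical non-binary module (the
+    # extensions are illustrative): a .c source matches a rule producing an
+    # object file, whose outputs feed the next rule, until no rule applies or
+    # a STATIC_LIBRARY is reached for a library module; the last target (plus
+    # any "$(CP)" destinations) ends up in TargetList.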
+
+ ## SetDir()
+ #
+ # @param OutputDir Output directory
+ # @param FdfParser FDF contents parser
+ # @param Workspace The directory of workspace
+ # @param ArchList The Arch list of platform
+ #
+ @staticmethod
+ def SetDir (OutputDir, FdfParser, WorkSpace, ArchList):
+ GenFdsGlobalVariable.VerboseLogger("GenFdsGlobalVariable.OutputDir:%s" % OutputDir)
+ GenFdsGlobalVariable.FdfParser = FdfParser
+ GenFdsGlobalVariable.WorkSpace = WorkSpace
+ GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], DataType.TAB_FV_DIRECTORY)
+ if not os.path.exists(GenFdsGlobalVariable.FvDir):
+ os.makedirs(GenFdsGlobalVariable.FvDir)
+ GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
+ if not os.path.exists(GenFdsGlobalVariable.FfsDir):
+ os.makedirs(GenFdsGlobalVariable.FfsDir)
+
+ #
+ # Create FV Address inf file
+ #
+ GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
+ FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
+ #
+ # Add [Options]
+ #
+ FvAddressFile.writelines("[options]" + DataType.TAB_LINE_BREAK)
+ BsAddress = '0'
+ for Arch in ArchList:
+ if GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].BsBaseAddress:
+ BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].BsBaseAddress
+ break
+
+ FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
+ BsAddress + \
+ DataType.TAB_LINE_BREAK)
+
+ RtAddress = '0'
+ for Arch in reversed(ArchList):
+ temp = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress
+ if temp:
+ RtAddress = temp
+ break
+
+ FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
+ RtAddress + \
+ DataType.TAB_LINE_BREAK)
+
+ FvAddressFile.close()
+
+ @staticmethod
+ def SetEnv(FdfParser, WorkSpace, ArchList, GlobalData):
+ GenFdsGlobalVariable.ModuleFile = WorkSpace.ModuleFile
+ GenFdsGlobalVariable.FdfParser = FdfParser
+ GenFdsGlobalVariable.WorkSpace = WorkSpace.Db
+ GenFdsGlobalVariable.ArchList = ArchList
+ GenFdsGlobalVariable.ToolChainTag = GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]
+ GenFdsGlobalVariable.TargetName = GlobalData.gGlobalDefines["TARGET"]
+ GenFdsGlobalVariable.ActivePlatform = GlobalData.gActivePlatform
+ GenFdsGlobalVariable.ConfDir = GlobalData.gConfDirectory
+ GenFdsGlobalVariable.EnableGenfdsMultiThread = GlobalData.gEnableGenfdsMultiThread
+ for Arch in ArchList:
+ GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.normpath(
+ os.path.join(GlobalData.gWorkspace,
+ WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GlobalData.gGlobalDefines['TARGET'],
+ GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory,
+ GlobalData.gGlobalDefines['TARGET'] +'_' + GlobalData.gGlobalDefines['TOOLCHAIN']))
+ GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = os.path.normpath(
+ WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
+ GlobalData.gGlobalDefines['TARGET'], GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory)
+ GenFdsGlobalVariable.PlatformName = WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
+ GlobalData.gGlobalDefines['TARGET'],
+ GlobalData.gGlobalDefines['TOOLCHAIN']].PlatformName
+ GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], DataType.TAB_FV_DIRECTORY)
+ if not os.path.exists(GenFdsGlobalVariable.FvDir):
+ os.makedirs(GenFdsGlobalVariable.FvDir)
+ GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
+ if not os.path.exists(GenFdsGlobalVariable.FfsDir):
+ os.makedirs(GenFdsGlobalVariable.FfsDir)
+
+ #
+ # Create FV Address inf file
+ #
+ GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
+ FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
+ #
+ # Add [Options]
+ #
+ FvAddressFile.writelines("[options]" + DataType.TAB_LINE_BREAK)
+ BsAddress = '0'
+ for Arch in ArchList:
+ BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
+ GlobalData.gGlobalDefines['TARGET'],
+ GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]].BsBaseAddress
+ if BsAddress:
+ break
+
+ FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
+ BsAddress + \
+ DataType.TAB_LINE_BREAK)
+
+ RtAddress = '0'
+ for Arch in reversed(ArchList):
+ temp = GenFdsGlobalVariable.WorkSpace.BuildObject[
+ GenFdsGlobalVariable.ActivePlatform, Arch, GlobalData.gGlobalDefines['TARGET'],
+ GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]].RtBaseAddress
+ if temp:
+ RtAddress = temp
+ break
+
+ FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
+ RtAddress + \
+ DataType.TAB_LINE_BREAK)
+
+ FvAddressFile.close()
+
+ ## ReplaceWorkspaceMacro()
+ #
+ # @param String String that may contain macro
+ #
+ @staticmethod
+ def ReplaceWorkspaceMacro(String):
+ String = mws.handleWsMacro(String)
+ Str = String.replace('$(WORKSPACE)', GenFdsGlobalVariable.WorkSpaceDir)
+ if os.path.exists(Str):
+ if not os.path.isabs(Str):
+ Str = os.path.abspath(Str)
+ else:
+ Str = mws.join(GenFdsGlobalVariable.WorkSpaceDir, String)
+ return os.path.normpath(Str)
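+
+    # Hedged example (assuming WorkSpaceDir is '/ws' and the file exists):
+    #   ReplaceWorkspaceMacro('$(WORKSPACE)/MyPlatform.fdf') -> '/ws/MyPlatform.fdf'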
+
+ ## Check if the input files are newer than output files
+ #
+ # @param Output Path of output file
+ # @param Input Path list of input files
+ #
+ # @retval True if Output doesn't exist, or any Input is newer
+    # @retval False if all Inputs are older than Output
+ #
+ @staticmethod
+ def NeedsUpdate(Output, Input):
+ if not os.path.exists(Output):
+ return True
+ # always update "Output" if no "Input" given
+ if not Input:
+ return True
+
+ # if fdf file is changed after the 'Output" is generated, update the 'Output'
+ OutputTime = os.path.getmtime(Output)
+ if GenFdsGlobalVariable.FdfFileTimeStamp > OutputTime:
+ return True
+
+ for F in Input:
+ # always update "Output" if any "Input" doesn't exist
+ if not os.path.exists(F):
+ return True
+ # always update "Output" if any "Input" is newer than "Output"
+ if os.path.getmtime(F) > OutputTime:
+ return True
+ return False
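+
+    # Hedged usage sketch, matching the pattern the generators below follow:
+    #   if GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
+    #       GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate ...")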
+
+ @staticmethod
+ def GenerateSection(Output, Input, Type=None, CompressionType=None, Guid=None,
+ GuidHdrLen=None, GuidAttr=[], Ui=None, Ver=None, InputAlign=[], BuildNumber=None, DummyFile=None, IsMakefile=False):
+ Cmd = ["GenSec"]
+ if Type:
+ Cmd += ("-s", Type)
+ if CompressionType:
+ Cmd += ("-c", CompressionType)
+ if Guid:
+ Cmd += ("-g", Guid)
+ if DummyFile:
+ Cmd += ("--dummy", DummyFile)
+ if GuidHdrLen:
+ Cmd += ("-l", GuidHdrLen)
+        # Add each GUIDed attribute
+ for Attr in GuidAttr:
+ Cmd += ("-r", Attr)
+        # Section alignment applies only to a dummy section without a section type
+ for SecAlign in InputAlign:
+ Cmd += ("--sectionalign", SecAlign)
+
+ CommandFile = Output + '.txt'
+ if Ui:
+ if IsMakefile:
+ if Ui == "$(MODULE_NAME)":
+ Cmd += ('-n', Ui)
+ else:
+ Cmd += ("-n", '"' + Ui + '"')
+ Cmd += ("-o", Output)
+ if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
+ GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
+ else:
+ SectionData = array('B', [0, 0, 0, 0])
+ SectionData.fromlist(array('B',Ui.encode('utf-16-le')).tolist())
+ SectionData.append(0)
+ SectionData.append(0)
+ Len = len(SectionData)
+ GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15)
+
+
+ DirName = os.path.dirname(Output)
+ if not CreateDirectory(DirName):
+ EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
+ else:
+ if DirName == '':
+ DirName = os.getcwd()
+ if not os.access(DirName, os.W_OK):
+ EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
+
+ try:
+ with open(Output, "wb") as Fd:
+ SectionData.tofile(Fd)
+ Fd.flush()
+ except IOError as X:
+ EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
+
+ elif Ver:
+ Cmd += ("-n", Ver)
+ if BuildNumber:
+ Cmd += ("-j", BuildNumber)
+ Cmd += ("-o", Output)
+
+ SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
+ if IsMakefile:
+ if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
+ GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
+ else:
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
+ return
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
+ else:
+ Cmd += ("-o", Output)
+ Cmd += Input
+
+ SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
+ if IsMakefile:
+ if sys.platform == "win32":
+ Cmd = ['if', 'exist', Input[0]] + Cmd
+ else:
+ Cmd = ['-test', '-e', Input[0], "&&"] + Cmd
+ if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
+ GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
+ elif GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
+ GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
+ if (os.path.getsize(Output) >= GenFdsGlobalVariable.LARGE_FILE_SIZE and
+ GenFdsGlobalVariable.LargeFileInFvFlags):
+ GenFdsGlobalVariable.LargeFileInFvFlags[-1] = True
+
+ @staticmethod
+ def GetAlignment (AlignString):
+ if not AlignString:
+ return 0
+ if AlignString.endswith('K'):
+ return int (AlignString.rstrip('K')) * 1024
+ if AlignString.endswith('M'):
+ return int (AlignString.rstrip('M')) * 1024 * 1024
+ if AlignString.endswith('G'):
+ return int (AlignString.rstrip('G')) * 1024 * 1024 * 1024
+ return int (AlignString)
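+
+    # Illustrative conversions, derived from the suffix handling above:
+    #   GetAlignment('')   -> 0
+    #   GetAlignment('8')  -> 8
+    #   GetAlignment('4K') -> 4096
+    #   GetAlignment('2M') -> 2097152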
+
+ @staticmethod
+ def GenerateFfs(Output, Input, Type, Guid, Fixed=False, CheckSum=False, Align=None,
+ SectionAlign=None, MakefilePath=None):
+ Cmd = ["GenFfs", "-t", Type, "-g", Guid]
+ mFfsValidAlign = ["0", "8", "16", "128", "512", "1K", "4K", "32K", "64K", "128K", "256K", "512K", "1M", "2M", "4M", "8M", "16M"]
+ if Fixed == True:
+ Cmd.append("-x")
+ if CheckSum:
+ Cmd.append("-s")
+ if Align:
+ if Align not in mFfsValidAlign:
+ Align = GenFdsGlobalVariable.GetAlignment (Align)
+ for index in range(0, len(mFfsValidAlign) - 1):
+ if ((Align > GenFdsGlobalVariable.GetAlignment(mFfsValidAlign[index])) and (Align <= GenFdsGlobalVariable.GetAlignment(mFfsValidAlign[index + 1]))):
+ break
+ Align = mFfsValidAlign[index + 1]
+ Cmd += ("-a", Align)
+
+ Cmd += ("-o", Output)
+ for I in range(0, len(Input)):
+ if MakefilePath:
+ Cmd += ("-oi", Input[I])
+ else:
+ Cmd += ("-i", Input[I])
+ if SectionAlign and SectionAlign[I]:
+ Cmd += ("-n", SectionAlign[I])
+
+ CommandFile = Output + '.txt'
+ SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
+
+ GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
+ if MakefilePath:
+ if (tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)) not in GenFdsGlobalVariable.FfsCmdDict:
+ GenFdsGlobalVariable.FfsCmdDict[tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)] = MakefilePath
+ GenFdsGlobalVariable.SecCmdList = []
+ GenFdsGlobalVariable.CopyList = []
+ else:
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
+ return
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FFS")
+
+ @staticmethod
+ def GenerateFirmwareVolume(Output, Input, BaseAddress=None, ForceRebase=None, Capsule=False, Dump=False,
+ AddressFile=None, MapFile=None, FfsList=[], FileSystemGuid=None):
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, Input+FfsList):
+ return
+ GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
+
+ Cmd = ["GenFv"]
+ if BaseAddress:
+ Cmd += ("-r", BaseAddress)
+
+ if ForceRebase == False:
+ Cmd += ("-F", "FALSE")
+ elif ForceRebase == True:
+ Cmd += ("-F", "TRUE")
+
+ if Capsule:
+ Cmd.append("-c")
+ if Dump:
+ Cmd.append("-p")
+ if AddressFile:
+ Cmd += ("-a", AddressFile)
+ if MapFile:
+ Cmd += ("-m", MapFile)
+ if FileSystemGuid:
+ Cmd += ("-g", FileSystemGuid)
+ Cmd += ("-o", Output)
+ for I in Input:
+ Cmd += ("-i", I)
+
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FV")
+
+ @staticmethod
+ def GenerateFirmwareImage(Output, Input, Type="efi", SubType=None, Zero=False,
+ Strip=False, Replace=False, TimeStamp=None, Join=False,
+ Align=None, Padding=None, Convert=False, IsMakefile=False):
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, Input) and not IsMakefile:
+ return
+ GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
+
+ Cmd = ["GenFw"]
+ if Type.lower() == "te":
+ Cmd.append("-t")
+ if SubType:
+ Cmd += ("-e", SubType)
+ if TimeStamp:
+ Cmd += ("-s", TimeStamp)
+ if Align:
+ Cmd += ("-a", Align)
+ if Padding:
+ Cmd += ("-p", Padding)
+ if Zero:
+ Cmd.append("-z")
+ if Strip:
+ Cmd.append("-l")
+ if Replace:
+ Cmd.append("-r")
+ if Join:
+ Cmd.append("-j")
+ if Convert:
+ Cmd.append("-m")
+ Cmd += ("-o", Output)
+ Cmd += Input
+ if IsMakefile:
+ if " ".join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
+ GenFdsGlobalVariable.SecCmdList.append(" ".join(Cmd).strip())
+ else:
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate firmware image")
+
+ @staticmethod
+ def GenerateOptionRom(Output, EfiInput, BinaryInput, Compress=False, ClassCode=None,
+ Revision=None, DeviceId=None, VendorId=None, IsMakefile=False):
+ InputList = []
+ Cmd = ["EfiRom"]
+ if EfiInput:
+
+ if Compress:
+ Cmd.append("-ec")
+ else:
+ Cmd.append("-e")
+
+ for EfiFile in EfiInput:
+ Cmd.append(EfiFile)
+ InputList.append (EfiFile)
+
+ if BinaryInput:
+ Cmd.append("-b")
+ for BinFile in BinaryInput:
+ Cmd.append(BinFile)
+ InputList.append (BinFile)
+
+ # Check List
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, InputList) and not IsMakefile:
+ return
+ GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
+
+ if ClassCode:
+ Cmd += ("-l", ClassCode)
+ if Revision:
+ Cmd += ("-r", Revision)
+ if DeviceId:
+ Cmd += ("-i", DeviceId)
+ if VendorId:
+ Cmd += ("-f", VendorId)
+
+ Cmd += ("-o", Output)
+ if IsMakefile:
+ if " ".join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
+ GenFdsGlobalVariable.SecCmdList.append(" ".join(Cmd).strip())
+ else:
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom")
+
+ @staticmethod
+ def GuidTool(Output, Input, ToolPath, Options='', returnValue=[], IsMakefile=False):
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, Input) and not IsMakefile:
+ return
+ GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
+
+ Cmd = [ToolPath, ]
+ Cmd += Options.split(' ')
+ Cmd += ("-o", Output)
+ Cmd += Input
+ if IsMakefile:
+ if " ".join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
+ GenFdsGlobalVariable.SecCmdList.append(" ".join(Cmd).strip())
+ else:
+ GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to call " + ToolPath, returnValue)
+
+ @staticmethod
+ def CallExternalTool (cmd, errorMess, returnValue=[]):
+
+ if type(cmd) not in (tuple, list):
+ GenFdsGlobalVariable.ErrorLogger("ToolError! Invalid parameter type in call to CallExternalTool")
+
+ if GenFdsGlobalVariable.DebugLevel != -1:
+ cmd += ('--debug', str(GenFdsGlobalVariable.DebugLevel))
+ GenFdsGlobalVariable.InfLogger (cmd)
+
+ if GenFdsGlobalVariable.VerboseMode:
+ cmd += ('-v',)
+ GenFdsGlobalVariable.InfLogger (cmd)
+ else:
+ stdout.write ('#')
+ stdout.flush()
+ GenFdsGlobalVariable.SharpCounter = GenFdsGlobalVariable.SharpCounter + 1
+ if GenFdsGlobalVariable.SharpCounter % GenFdsGlobalVariable.SharpNumberPerLine == 0:
+ stdout.write('\n')
+
+ try:
+ PopenObject = Popen(' '.join(cmd), stdout=PIPE, stderr=PIPE, shell=True)
+ except Exception as X:
+ EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
+ (out, error) = PopenObject.communicate()
+
+ while PopenObject.returncode is None:
+ PopenObject.wait()
+ if returnValue != [] and returnValue[0] != 0:
+ #get command return value
+ returnValue[0] = PopenObject.returncode
+ return
+ if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
+ GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
+ GenFdsGlobalVariable.InfLogger(out.decode(encoding='utf-8', errors='ignore'))
+ GenFdsGlobalVariable.InfLogger(error.decode(encoding='utf-8', errors='ignore'))
+ if PopenObject.returncode != 0:
+ print("###", cmd)
+ EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)
+
+ @staticmethod
+ def VerboseLogger (msg):
+ EdkLogger.verbose(msg)
+
+ @staticmethod
+ def InfLogger (msg):
+ EdkLogger.info(msg)
+
+ # VBox - begin - Used by GenFds to show firmware space allocs during --quiet builds, and maybe more.
+ @staticmethod
+ def QuietLogger (msg):
+ EdkLogger.quiet(msg)
+ # VBox - end
+
+ @staticmethod
+ def ErrorLogger (msg, File=None, Line=None, ExtraData=None):
+ EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData)
+
+ @staticmethod
+ def DebugLogger (Level, msg):
+ EdkLogger.debug(Level, msg)
+
+ ## MacroExtend()
+ #
+ # @param Str String that may contain macro
+ # @param MacroDict Dictionary that contains macro value pair
+ #
+ @staticmethod
+ def MacroExtend (Str, MacroDict=None, Arch=DataType.TAB_COMMON):
+ if Str is None:
+ return None
+
+ Dict = {'$(WORKSPACE)': GenFdsGlobalVariable.WorkSpaceDir,
+# '$(OUTPUT_DIRECTORY)': GenFdsGlobalVariable.OutputDirFromDsc,
+ '$(TARGET)': GenFdsGlobalVariable.TargetName,
+ '$(TOOL_CHAIN_TAG)': GenFdsGlobalVariable.ToolChainTag,
+ '$(SPACE)': ' '
+ }
+
+ if Arch != DataType.TAB_COMMON and Arch in GenFdsGlobalVariable.ArchList:
+ OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[Arch]
+ else:
+ OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[GenFdsGlobalVariable.ArchList[0]]
+
+ Dict['$(OUTPUT_DIRECTORY)'] = OutputDir
+
+ if MacroDict:
+ Dict.update(MacroDict)
+
+ for key in Dict:
+ if Str.find(key) >= 0:
+ Str = Str.replace (key, Dict[key])
+
+ if Str.find('$(ARCH)') >= 0:
+ if len(GenFdsGlobalVariable.ArchList) == 1:
+ Str = Str.replace('$(ARCH)', GenFdsGlobalVariable.ArchList[0])
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "No way to determine $(ARCH) for %s" % Str)
+
+ return Str
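+
+    # Hedged example (values are hypothetical; assumes ArchList == ['X64']):
+    #   MacroExtend('$(TARGET)_$(TOOL_CHAIN_TAG)/$(ARCH)') -> 'RELEASE_VS2019/X64'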
+
+ ## GetPcdValue()
+ #
+ # @param PcdPattern pattern that labels a PCD.
+ #
+ @staticmethod
+ def GetPcdValue (PcdPattern):
+ if PcdPattern is None:
+ return None
+ if PcdPattern.startswith('PCD('):
+ PcdPair = PcdPattern[4:].rstrip(')').strip().split('.')
+ else:
+ PcdPair = PcdPattern.strip().split('.')
+ TokenSpace = PcdPair[0]
+ TokenCName = PcdPair[1]
+
+ for Arch in GenFdsGlobalVariable.ArchList:
+ Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
+ PcdDict = Platform.Pcds
+ for Key in PcdDict:
+ PcdObj = PcdDict[Key]
+ if (PcdObj.TokenCName == TokenCName) and (PcdObj.TokenSpaceGuidCName == TokenSpace):
+ if PcdObj.Type != DataType.TAB_PCDS_FIXED_AT_BUILD:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
+ if PcdObj.DatumType != DataType.TAB_VOID:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
+
+ return PcdObj.DefaultValue
+
+ for Package in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,
+ Arch,
+ GenFdsGlobalVariable.TargetName,
+ GenFdsGlobalVariable.ToolChainTag):
+ PcdDict = Package.Pcds
+ for Key in PcdDict:
+ PcdObj = PcdDict[Key]
+ if (PcdObj.TokenCName == TokenCName) and (PcdObj.TokenSpaceGuidCName == TokenSpace):
+ if PcdObj.Type != DataType.TAB_PCDS_FIXED_AT_BUILD:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
+ if PcdObj.DatumType != DataType.TAB_VOID:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
+
+ return PcdObj.DefaultValue
+
+ return ''
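+
+    # Hedged usage sketch (token space and PCD name are hypothetical); both
+    # spellings resolve the same FixedAtBuild VOID* PCD, returning '' if absent:
+    #   GetPcdValue('PCD(gHypotheticalTokenSpaceGuid.PcdExample)')
+    #   GetPcdValue('gHypotheticalTokenSpaceGuid.PcdExample')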
+
+## FindExtendTool()
+#
+# Find location of tools to process data
+#
+# @param KeyStringList Filter for inputs of section generation
+# @param CurrentArchList Arch list
+# @param NameGuid The Guid name
+#
+def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
+ if GenFdsGlobalVariable.GuidToolDefinition:
+ if NameGuid in GenFdsGlobalVariable.GuidToolDefinition:
+ return GenFdsGlobalVariable.GuidToolDefinition[NameGuid]
+
+ ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
+ ToolDef = ToolDefObj.ToolDef
+ ToolDb = ToolDef.ToolsDefTxtDatabase
+ # If the user did not specify a filter, try to deduce it from global data.
+ if KeyStringList is None or KeyStringList == []:
+ Target = GenFdsGlobalVariable.TargetName
+ ToolChain = GenFdsGlobalVariable.ToolChainTag
+ if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)
+ KeyStringList = [Target + '_' + ToolChain + '_' + CurrentArchList[0]]
+ for Arch in CurrentArchList:
+ if Target + '_' + ToolChain + '_' + Arch not in KeyStringList:
+ KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)
+
+ ToolPathTmp = None
+ ToolOption = None
+ for Arch in CurrentArchList:
+ MatchItem = None
+ MatchPathItem = None
+ MatchOptionsItem = None
+ for KeyString in KeyStringList:
+ KeyStringBuildTarget, KeyStringToolChain, KeyStringArch = KeyString.split('_')
+ if KeyStringArch != Arch:
+ continue
+ for Item in ToolDef.ToolsDefTxtDictionary:
+ if len(Item.split('_')) < 5:
+ continue
+ ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item.split('_')
+ if ItemTarget == DataType.TAB_STAR:
+ ItemTarget = KeyStringBuildTarget
+ if ItemToolChain == DataType.TAB_STAR:
+ ItemToolChain = KeyStringToolChain
+ if ItemArch == DataType.TAB_STAR:
+ ItemArch = KeyStringArch
+ if ItemTarget != KeyStringBuildTarget:
+ continue
+ if ItemToolChain != KeyStringToolChain:
+ continue
+ if ItemArch != KeyStringArch:
+ continue
+ if ItemAttr != DataType.TAB_GUID:
+ # Not GUID attribute
+ continue
+ if ToolDef.ToolsDefTxtDictionary[Item].lower() != NameGuid.lower():
+ # No GUID value match
+ continue
+ if MatchItem:
+ if MatchItem.split('_')[3] == ItemTool:
+ # Tool name is the same
+ continue
+ if CalculatePriorityValue(MatchItem) > CalculatePriorityValue(Item):
+ # Current MatchItem is higher priority than new match item
+ continue
+ MatchItem = Item
+ if not MatchItem:
+ continue
+ ToolName = MatchItem.split('_')[3]
+ for Item in ToolDef.ToolsDefTxtDictionary:
+ if len(Item.split('_')) < 5:
+ continue
+ ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item.split('_')
+ if ItemTarget == DataType.TAB_STAR:
+ ItemTarget = KeyStringBuildTarget
+ if ItemToolChain == DataType.TAB_STAR:
+ ItemToolChain = KeyStringToolChain
+ if ItemArch == DataType.TAB_STAR:
+ ItemArch = KeyStringArch
+ if ItemTarget != KeyStringBuildTarget:
+ continue
+ if ItemToolChain != KeyStringToolChain:
+ continue
+ if ItemArch != KeyStringArch:
+ continue
+ if ItemTool != ToolName:
+ continue
+ if ItemAttr == 'PATH':
+ if MatchPathItem:
+ if CalculatePriorityValue(MatchPathItem) <= CalculatePriorityValue(Item):
+ MatchPathItem = Item
+ else:
+ MatchPathItem = Item
+ if ItemAttr == 'FLAGS':
+ if MatchOptionsItem:
+ if CalculatePriorityValue(MatchOptionsItem) <= CalculatePriorityValue(Item):
+ MatchOptionsItem = Item
+ else:
+ MatchOptionsItem = Item
+ if MatchPathItem:
+ ToolPathTmp = ToolDef.ToolsDefTxtDictionary[MatchPathItem]
+ if MatchOptionsItem:
+ ToolOption = ToolDef.ToolsDefTxtDictionary[MatchOptionsItem]
+
+ for Arch in CurrentArchList:
+ MatchItem = None
+ MatchPathItem = None
+ MatchOptionsItem = None
+ for KeyString in KeyStringList:
+ KeyStringBuildTarget, KeyStringToolChain, KeyStringArch = KeyString.split('_')
+ if KeyStringArch != Arch:
+ continue
+ Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, KeyStringBuildTarget, KeyStringToolChain]
+ for Item in Platform.BuildOptions:
+ if len(Item[1].split('_')) < 5:
+ continue
+ ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item[1].split('_')
+ if ItemTarget == DataType.TAB_STAR:
+ ItemTarget = KeyStringBuildTarget
+ if ItemToolChain == DataType.TAB_STAR:
+ ItemToolChain = KeyStringToolChain
+ if ItemArch == DataType.TAB_STAR:
+ ItemArch = KeyStringArch
+ if ItemTarget != KeyStringBuildTarget:
+ continue
+ if ItemToolChain != KeyStringToolChain:
+ continue
+ if ItemArch != KeyStringArch:
+ continue
+ if ItemAttr != DataType.TAB_GUID:
+ # Not a GUID attribute
+ continue
+ if Platform.BuildOptions[Item].lower() != NameGuid.lower():
+ # No GUID value match
+ continue
+ if MatchItem:
+ if MatchItem[1].split('_')[3] == ItemTool:
+ # Tool name is the same
+ continue
+ if CalculatePriorityValue(MatchItem[1]) > CalculatePriorityValue(Item[1]):
+ # Current MatchItem is higher priority than new match item
+ continue
+ MatchItem = Item
+ if not MatchItem:
+ continue
+ ToolName = MatchItem[1].split('_')[3]
+ for Item in Platform.BuildOptions:
+ if len(Item[1].split('_')) < 5:
+ continue
+ ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item[1].split('_')
+ if ItemTarget == DataType.TAB_STAR:
+ ItemTarget = KeyStringBuildTarget
+ if ItemToolChain == DataType.TAB_STAR:
+ ItemToolChain = KeyStringToolChain
+ if ItemArch == DataType.TAB_STAR:
+ ItemArch = KeyStringArch
+ if ItemTarget != KeyStringBuildTarget:
+ continue
+ if ItemToolChain != KeyStringToolChain:
+ continue
+ if ItemArch != KeyStringArch:
+ continue
+ if ItemTool != ToolName:
+ continue
+ if ItemAttr == 'PATH':
+ if MatchPathItem:
+ if CalculatePriorityValue(MatchPathItem[1]) <= CalculatePriorityValue(Item[1]):
+ MatchPathItem = Item
+ else:
+ MatchPathItem = Item
+ if ItemAttr == 'FLAGS':
+ if MatchOptionsItem:
+ if CalculatePriorityValue(MatchOptionsItem[1]) <= CalculatePriorityValue(Item[1]):
+ MatchOptionsItem = Item
+ else:
+ MatchOptionsItem = Item
+ if MatchPathItem:
+ ToolPathTmp = Platform.BuildOptions[MatchPathItem]
+ if MatchOptionsItem:
+ ToolOption = Platform.BuildOptions[MatchOptionsItem]
+ GenFdsGlobalVariable.GuidToolDefinition[NameGuid] = (ToolPathTmp, ToolOption)
+ return ToolPathTmp, ToolOption
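FindExtendTool resolves a section GUID to an external tool via tools_def keys of the form TARGET_TOOLCHAIN_ARCH_TOOL_ATTRIBUTE. A hedged sketch of that core lookup follows; the dictionary is illustrative, and wildcard (*) keys plus CalculatePriorityValue ordering are omitted:

    # Assumed mini tools_def: resolve a section GUID to the PATH/FLAGS of
    # the matching tool for one TARGET_TOOLCHAIN_ARCH prefix.
    TOOLS_DEF = {
        'DEBUG_GCC5_X64_MYTOOL_GUID':  'ee4e5898-3914-4259-9d6e-dc7bd79403cf',
        'DEBUG_GCC5_X64_MYTOOL_PATH':  '/usr/bin/LzmaCompress',
        'DEBUG_GCC5_X64_MYTOOL_FLAGS': '-v',
    }

    def find_tool_for_guid(tools_def, key_prefix, name_guid):
        tool_name = None
        for key, value in tools_def.items():
            if key.startswith(key_prefix) and key.endswith('_GUID') \
                    and value.lower() == name_guid.lower():
                tool_name = key.split('_')[3]
        if tool_name is None:
            return None, None
        return (tools_def.get('%s_%s_PATH' % (key_prefix, tool_name)),
                tools_def.get('%s_%s_FLAGS' % (key_prefix, tool_name)))

    print(find_tool_for_guid(TOOLS_DEF, 'DEBUG_GCC5_X64',
                             'EE4E5898-3914-4259-9D6E-DC7BD79403CF'))
    # -> ('/usr/bin/LzmaCompress', '-v')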
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GuidSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GuidSection.py
new file mode 100755
index 00000000..3e7d9673
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/GuidSection.py
@@ -0,0 +1,278 @@
+## @file
+# process GUIDed section generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Section
+import subprocess
+from .Ffs import SectionSuffix
+import Common.LongFilePathOs as os
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from .GenFdsGlobalVariable import FindExtendTool
+from CommonDataClass.FdfClass import GuidSectionClassObject
+import sys
+from Common import EdkLogger
+from Common.BuildToolError import *
+from .FvImageSection import FvImageSection
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.DataType import *
+
+## generate GUIDed section
+#
+#
+class GuidSection(GuidSectionClassObject) :
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ GuidSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # Generate GUIDed section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+ # @param Dict dictionary contains macro and its value
+ # @retval tuple (Generated file name, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile=False):
+ #
+ # Generate all sections
+ #
+ self.KeyStringList = KeyStringList
+ self.CurrentArchList = GenFdsGlobalVariable.ArchList
+ if FfsInf is not None:
+ self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
+ self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
+ self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
+ self.CurrentArchList = [FfsInf.CurrentArch]
+
+ SectFile = tuple()
+ SectAlign = []
+ Index = 0
+ MaxAlign = None
+ if Dict is None:
+ Dict = {}
+ if self.FvAddr != []:
+ FvAddrIsSet = True
+ else:
+ FvAddrIsSet = False
+
+ if self.ProcessRequired in ("TRUE", "1"):
+ if self.FvAddr != []:
+ # Do not use FvAddr when the image is processed.
+ self.FvAddr = []
+ if self.FvParentAddr is not None:
+ # Do not use the parent FV address when the image is processed.
+ self.FvParentAddr = None
+
+ for Sect in self.SectionList:
+ Index = Index + 1
+ SecIndex = '%s.%d' % (SecNum, Index)
+ # set base address for inside FvImage
+ if isinstance(Sect, FvImageSection):
+ if self.FvAddr != []:
+ Sect.FvAddr = self.FvAddr.pop(0)
+ self.IncludeFvSection = True
+ elif isinstance(Sect, GuidSection):
+ Sect.FvAddr = self.FvAddr
+ Sect.FvParentAddr = self.FvParentAddr
+ ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)
+ if isinstance(Sect, GuidSection):
+ if Sect.IncludeFvSection:
+ self.IncludeFvSection = Sect.IncludeFvSection
+
+ if align is not None:
+ if MaxAlign is None:
+ MaxAlign = align
+ if GenFdsGlobalVariable.GetAlignment (align) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
+ MaxAlign = align
+ if ReturnSectList != []:
+ if align is None:
+ align = "1"
+ for file in ReturnSectList:
+ SectFile += (file,)
+ SectAlign.append(align)
+
+ if MaxAlign is not None:
+ if self.Alignment is None:
+ self.Alignment = MaxAlign
+ else:
+ if GenFdsGlobalVariable.GetAlignment (MaxAlign) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
+ self.Alignment = MaxAlign
+
+ OutputFile = OutputPath + \
+ os.sep + \
+ ModuleName + \
+ SUP_MODULE_SEC + \
+ SecNum + \
+ SectionSuffix['GUIDED']
+ OutputFile = os.path.normpath(OutputFile)
+
+ ExternalTool = None
+ ExternalOption = None
+ if self.NameGuid is not None:
+ ExternalTool, ExternalOption = FindExtendTool(self.KeyStringList, self.CurrentArchList, self.NameGuid)
+
+ #
+ # If no GUID is given, generate the default
+ # CRC32 (GENCRC32) section
+ #
+ if self.NameGuid is None :
+ GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
+ GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign, IsMakefile=IsMakefile)
+ OutputFileList = []
+ OutputFileList.append(OutputFile)
+ return OutputFileList, self.Alignment
+ # or the GUID has no matching entry in the external tool list
+ elif ExternalTool is None:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
+ else:
+ DummyFile = OutputFile + ".dummy"
+ #
+ # Call GenSection with DUMMY section type.
+ #
+ GenFdsGlobalVariable.GenerateSection(DummyFile, SectFile, InputAlign=SectAlign, IsMakefile=IsMakefile)
+ #
+ # Use external tool process the Output
+ #
+ TempFile = OutputPath + \
+ os.sep + \
+ ModuleName + \
+ SUP_MODULE_SEC + \
+ SecNum + \
+ '.tmp'
+ TempFile = os.path.normpath(TempFile)
+ #
+ # Remove the temp file if its timestamp is older than the dummy file's,
+ # in case the external tool fails this time though it succeeded before.
+ # An error should be reported if the external tool does not generate new output from the new input.
+ #
+ if os.path.exists(TempFile) and os.path.exists(DummyFile) and os.path.getmtime(TempFile) < os.path.getmtime(DummyFile):
+ os.remove(TempFile)
+
+ FirstCall = False
+ CmdOption = '-e'
+ if ExternalOption is not None:
+ CmdOption = CmdOption + ' ' + ExternalOption
+ if not GenFdsGlobalVariable.EnableGenfdsMultiThread:
+ if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr is not None:
+ # FirstCall is only set for an encapsulated flash FV image without the process-required attribute.
+ FirstCall = True
+ #
+ # Call external tool
+ #
+ ReturnValue = [1]
+ if FirstCall:
+ # First try calling the guided tool with the -z option plus CmdOption, for tools that do not require processing.
+ GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, '-z' + ' ' + CmdOption, ReturnValue)
+
+ #
+ # When the tool was not called, or the first call failed, ReturnValue[0] is nonzero.
+ # Call the guided tool with CmdOption alone.
+ #
+ if ReturnValue[0] != 0:
+ FirstCall = False
+ ReturnValue[0] = 0
+ GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption)
+ #
+ # Some external tools do not follow the standard rule of returning nonzero on failure,
+ # so the output file has to be checked.
+ #
+
+ if not os.path.exists(TempFile) :
+ EdkLogger.error("GenFds", COMMAND_FAILURE, 'Failed to call %s; no output file was generated' % ExternalTool)
+
+ FileHandleIn = open(DummyFile, 'rb')
+ FileHandleIn.seek(0, 2)
+ InputFileSize = FileHandleIn.tell()
+
+ FileHandleOut = open(TempFile, 'rb')
+ FileHandleOut.seek(0, 2)
+ TempFileSize = FileHandleOut.tell()
+
+ Attribute = []
+ HeaderLength = None
+ if self.ExtraHeaderSize != -1:
+ HeaderLength = str(self.ExtraHeaderSize)
+
+ if self.ProcessRequired == "NONE" and HeaderLength is None:
+ if TempFileSize > InputFileSize:
+ FileHandleIn.seek(0)
+ BufferIn = FileHandleIn.read()
+ FileHandleOut.seek(0)
+ BufferOut = FileHandleOut.read()
+ if BufferIn == BufferOut[TempFileSize - InputFileSize:]:
+ HeaderLength = str(TempFileSize - InputFileSize)
+ # No recoverable header: mark the GUIDed section as requiring processing.
+ if HeaderLength is None:
+ Attribute.append('PROCESSING_REQUIRED')
+
+ FileHandleIn.close()
+ FileHandleOut.close()
+
+ if FirstCall and 'PROCESSING_REQUIRED' in Attribute:
+ # The -z output of the first call is the process-required data; call the guided tool again with the real options.
+ GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption)
+
+ #
+ # Call Gensection Add Section Header
+ #
+ if self.ProcessRequired in ("TRUE", "1"):
+ if 'PROCESSING_REQUIRED' not in Attribute:
+ Attribute.append('PROCESSING_REQUIRED')
+
+ if self.AuthStatusValid in ("TRUE", "1"):
+ Attribute.append('AUTH_STATUS_VALID')
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
+ Guid=self.NameGuid, GuidAttr=Attribute, GuidHdrLen=HeaderLength)
+
+ else:
+ # Add the input file so GenSec can determine PROCESSING_REQUIRED.
+ GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption, IsMakefile=IsMakefile)
+ Attribute = []
+ HeaderLength = None
+ if self.ExtraHeaderSize != -1:
+ HeaderLength = str(self.ExtraHeaderSize)
+ if self.AuthStatusValid in ("TRUE", "1"):
+ Attribute.append('AUTH_STATUS_VALID')
+ if self.ProcessRequired == "NONE" and HeaderLength is None:
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
+ Guid=self.NameGuid, GuidAttr=Attribute,
+ GuidHdrLen=HeaderLength, DummyFile=DummyFile, IsMakefile=IsMakefile)
+ else:
+ if self.ProcessRequired in ("TRUE", "1"):
+ if 'PROCESSING_REQUIRED' not in Attribute:
+ Attribute.append('PROCESSING_REQUIRED')
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
+ Guid=self.NameGuid, GuidAttr=Attribute,
+ GuidHdrLen=HeaderLength, IsMakefile=IsMakefile)
+
+ OutputFileList = []
+ OutputFileList.append(OutputFile)
+ if 'PROCESSING_REQUIRED' in Attribute:
+ # Reset the GUIDed section alignment to None for process-required GUIDed data.
+ self.Alignment = None
+ self.IncludeFvSection = False
+ self.ProcessRequired = "TRUE"
+ if IsMakefile and self.Alignment is not None and self.Alignment.strip() == '0':
+ self.Alignment = '1'
+ return OutputFileList, self.Alignment
+
+
+
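The GuidHdrLen auto-detection above compares the tool's output against its input; a self-contained sketch of the heuristic:

    # If the tool's output ends with the unmodified input, treat the size
    # difference as the GUIDed-section header the tool prepended; otherwise
    # the data was transformed and PROCESSING_REQUIRED applies.
    def guess_guided_header_length(input_bytes, output_bytes):
        if len(output_bytes) > len(input_bytes) and \
                output_bytes[len(output_bytes) - len(input_bytes):] == input_bytes:
            return len(output_bytes) - len(input_bytes)
        return None

    assert guess_guided_header_length(b'payload', b'HDR!payload') == 4
    assert guess_guided_header_length(b'payload', b'zzcompressed') is None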
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomFileStatement.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
new file mode 100755
index 00000000..f6d8a529
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
@@ -0,0 +1,48 @@
+## @file
+# process OptionROM generation from FILE statement
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+##
+#
+#
+class OptRomFileStatement:
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.FileName = None
+ self.FileType = None
+ self.OverrideAttribs = None
+
+ ## GenFfs() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Dict dictionary contains macro and value pair
+ # @retval string Generated FFS file name
+ #
+ def GenFfs(self, Dict = None, IsMakefile=False):
+
+ if Dict is None:
+ Dict = {}
+
+ if self.FileName is not None:
+ self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
+
+ return self.FileName
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
new file mode 100755
index 00000000..43fd46ad
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
@@ -0,0 +1,159 @@
+## @file
+# process OptionROM generation from INF statement
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import RuleSimpleFile
+from . import RuleComplexFile
+from . import Section
+import Common.GlobalData as GlobalData
+
+from Common.DataType import *
+from Common.StringUtils import *
+from .FfsInfStatement import FfsInfStatement
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+
+##
+#
+#
+class OptRomInfStatement (FfsInfStatement):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FfsInfStatement.__init__(self)
+ self.OverrideAttribs = None
+
+ ## __GetOptRomParams() method
+ #
+ # Parse inf file to get option ROM related parameters
+ #
+ # @param self The object pointer
+ #
+ def __GetOptRomParams(self):
+ if self.OverrideAttribs is None:
+ self.OverrideAttribs = OverrideAttribs()
+
+ if self.OverrideAttribs.NeedCompress is None:
+ self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
+ if self.OverrideAttribs.NeedCompress is not None:
+ if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
+ GenFdsGlobalVariable.ErrorLogger( "Expected TRUE/FALSE for PCI_COMPRESS: %s" %self.InfFileName)
+ self.OverrideAttribs.NeedCompress = \
+ self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
+
+ if self.OverrideAttribs.PciVendorId is None:
+ self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
+
+ if self.OverrideAttribs.PciClassCode is None:
+ self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
+
+ if self.OverrideAttribs.PciDeviceId is None:
+ self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
+
+ if self.OverrideAttribs.PciRevision is None:
+ self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
+
+# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
+# RecordList = InfObj._RawData[MODEL_META_DATA_HEADER, InfObj._Arch, InfObj._Platform]
+# for Record in RecordList:
+# Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+# Name = Record[0]
+ ## GenFfs() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @retval string Generated .efi file name
+ #
+ def GenFfs(self, IsMakefile=False):
+ #
+ # Parse the INF file to get module-related information
+ #
+
+ self.__InfParse__()
+ self.__GetOptRomParams()
+ #
+ # Get the rule of how to generate Ffs file
+ #
+ Rule = self.__GetRule__()
+ GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
+ #
+ # For a rule that has only a simple file
+ #
+ if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
+ EfiOutputList = self.__GenSimpleFileSection__(Rule, IsMakefile=IsMakefile)
+ return EfiOutputList
+ #
+ # For a rule that has a complex file
+ #
+ elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
+ EfiOutputList = self.__GenComplexFileSection__(Rule, IsMakefile=IsMakefile)
+ return EfiOutputList
+
+ ## __GenSimpleFileSection__() method
+ #
+ # Get .efi files according to simple rule.
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @retval string File name of the generated section file
+ #
+ def __GenSimpleFileSection__(self, Rule, IsMakefile = False):
+ #
+ # Prepare the parameter of GenSection
+ #
+
+ OutputFileList = []
+ if Rule.FileName is not None:
+ GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
+ OutputFileList.append(GenSecInputFile)
+ else:
+ OutputFileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
+
+ return OutputFileList
+
+
+ ## __GenComplexFileSection__() method
+ #
+ # Get .efi by sections in complex Rule
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @retval string File name of the generated section file
+ #
+ def __GenComplexFileSection__(self, Rule, IsMakefile=False):
+
+ OutputFileList = []
+ for Sect in Rule.SectionList:
+ if Sect.SectionType == BINARY_FILE_TYPE_PE32:
+ if Sect.FileName is not None:
+ GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
+ OutputFileList.append(GenSecInputFile)
+ else:
+ FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
+ OutputFileList.extend(FileList)
+
+ return OutputFileList
+
+class OverrideAttribs:
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+
+ self.PciVendorId = None
+ self.PciClassCode = None
+ self.PciDeviceId = None
+ self.PciRevision = None
+ self.NeedCompress = None
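A hedged sketch of how the PCI_* keys read from an INF's OptRomDefs populate the override attributes; the dictionary below is illustrative stand-in data, not a parsed INF:

    # Stand-in for values parsed out of an INF [Defines] section.
    opt_rom_defs = {'PCI_COMPRESS': 'TRUE', 'PCI_VENDOR_ID': '0x8086'}

    need_compress = opt_rom_defs.get('PCI_COMPRESS')
    if need_compress is not None:
        if need_compress.upper() not in ('TRUE', 'FALSE'):
            raise ValueError('Expected TRUE/FALSE for PCI_COMPRESS')
        # Normalize the TRUE/FALSE string to a boolean, as __GetOptRomParams does.
        need_compress = need_compress.upper() == 'TRUE'
    pci_vendor_id = opt_rom_defs.get('PCI_VENDOR_ID')
    assert need_compress is True and pci_vendor_id == '0x8086'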
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptionRom.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptionRom.py
new file mode 100755
index 00000000..b609c15b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptionRom.py
@@ -0,0 +1,131 @@
+## @file
+# process OptionROM generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import subprocess
+
+from . import OptRomInfStatement
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import OptionRomClassObject
+from Common.Misc import SaveFileOnChange
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+##
+#
+#
+class OPTIONROM (OptionRomClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self, Name = ""):
+ OptionRomClassObject.__init__(self)
+ self.DriverName = Name
+
+ ## AddToBuffer()
+ #
+ # Generate Option ROM
+ #
+ # @param self The object pointer
+ # @param Buffer The buffer the generated OptROM data will be put in
+ # @retval string Generated OptROM file path
+ #
+ def AddToBuffer (self, Buffer, Flag=False) :
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger( "\nGenerating %s Option ROM ..." %self.DriverName)
+
+ EfiFileList = []
+ BinFileList = []
+
+ # Process Modules in FfsList
+ for FfsFile in self.FfsList :
+
+ if isinstance(FfsFile, OptRomInfStatement.OptRomInfStatement):
+ FilePathNameList = FfsFile.GenFfs(IsMakefile=Flag)
+ if len(FilePathNameList) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s did not produce any .efi files, so no file can be put into the option ROM." % (FfsFile.InfFileName))
+ if FfsFile.OverrideAttribs is None:
+ EfiFileList.extend(FilePathNameList)
+ else:
+ FileName = os.path.basename(FilePathNameList[0])
+ TmpOutputDir = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName, FfsFile.CurrentArch)
+ if not os.path.exists(TmpOutputDir) :
+ os.makedirs(TmpOutputDir)
+ TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')
+
+ GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
+ FilePathNameList,
+ [],
+ FfsFile.OverrideAttribs.NeedCompress,
+ FfsFile.OverrideAttribs.PciClassCode,
+ FfsFile.OverrideAttribs.PciRevision,
+ FfsFile.OverrideAttribs.PciDeviceId,
+ FfsFile.OverrideAttribs.PciVendorId,
+ IsMakefile = Flag)
+ BinFileList.append(TmpOutputFile)
+ else:
+ FilePathName = FfsFile.GenFfs(IsMakefile=Flag)
+ if FfsFile.OverrideAttribs is not None:
+ FileName = os.path.basename(FilePathName)
+ TmpOutputDir = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName, FfsFile.CurrentArch)
+ if not os.path.exists(TmpOutputDir) :
+ os.makedirs(TmpOutputDir)
+ TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')
+
+ GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
+ [FilePathName],
+ [],
+ FfsFile.OverrideAttribs.NeedCompress,
+ FfsFile.OverrideAttribs.PciClassCode,
+ FfsFile.OverrideAttribs.PciRevision,
+ FfsFile.OverrideAttribs.PciDeviceId,
+ FfsFile.OverrideAttribs.PciVendorId,
+ IsMakefile=Flag)
+ BinFileList.append(TmpOutputFile)
+ else:
+ if FfsFile.FileType == 'EFI':
+ EfiFileList.append(FilePathName)
+ else:
+ BinFileList.append(FilePathName)
+
+ #
+ # Call EfiRom tool
+ #
+ OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName)
+ OutputFile = OutputFile + '.rom'
+
+ GenFdsGlobalVariable.GenerateOptionRom(
+ OutputFile,
+ EfiFileList,
+ BinFileList,
+ IsMakefile=Flag)
+
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger( "\nGenerated %s Option ROM successfully" %self.DriverName)
+ GenFdsGlobalVariable.SharpCounter = 0
+
+ return OutputFile
+
+class OverrideAttribs:
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+
+ self.PciVendorId = None
+ self.PciClassCode = None
+ self.PciDeviceId = None
+ self.PciRevision = None
+ self.NeedCompress = None
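Stripped of the override handling, AddToBuffer sorts FILE statements into the two EfiRom input lists; a toy mirror of that grouping (file names are placeholders):

    # FILE statements typed 'EFI' feed EfiRom's EFI input list; everything
    # else (already-built images) goes into the binary list.
    ffs_files = [('Driver1.efi', 'EFI'), ('Legacy16.bin', 'BIN')]
    efi_file_list = [name for name, ftype in ffs_files if ftype == 'EFI']
    bin_file_list = [name for name, ftype in ffs_files if ftype != 'EFI']
    assert efi_file_list == ['Driver1.efi']
    assert bin_file_list == ['Legacy16.bin']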
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Region.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Region.py
new file mode 100755
index 00000000..75909096
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Region.py
@@ -0,0 +1,348 @@
+## @file
+# process FD Region generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from struct import *
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from io import BytesIO
+import string
+import Common.LongFilePathOs as os
+from stat import *
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common.DataType import BINARY_FILE_TYPE_FV
+
+## generate Region
+#
+#
+class Region(object):
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ self.Offset = None # The begin position of the Region
+ self.Size = None # The Size of the Region
+ self.PcdOffset = None
+ self.PcdSize = None
+ self.SetVarDict = {}
+ self.RegionType = None
+ self.RegionDataList = []
+
+ ## PadBuffer()
+ #
+ # Add padding bytes to the Buffer
+ #
+ # @param Buffer The buffer the padding bytes will be written to
+ # @param ErasePolarity Flash erase polarity
+ # @param Size Number of padding bytes requested
+ #
+
+ def PadBuffer(self, Buffer, ErasePolarity, Size):
+ if Size > 0:
+ if (ErasePolarity == '1') :
+ PadByte = pack('B', 0xFF)
+ else:
+ PadByte = pack('B', 0)
+ for i in range(0, Size):
+ Buffer.write(PadByte)
+
+ ## AddToBuffer()
+ #
+ # Add region data to the Buffer
+ #
+ # @param self The object pointer
+ # @param Buffer The buffer the generated region data will be put in
+ # @param BaseAddress base address of region
+ # @param BlockSize block size of region
+ # @param BlockNum How many blocks in region
+ # @param ErasePolarity Flash erase polarity
+ # @param MacroDict macro value pair
+ # @retval string Generated FV file path
+ #
+
+ def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, MacroDict=None, Flag=False):
+ Size = self.Size
+ if MacroDict is None:
+ MacroDict = {}
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
+ GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)
+ GenFdsGlobalVariable.SharpCounter = 0
+ if Flag and (self.RegionType != BINARY_FILE_TYPE_FV):
+ return
+
+ if self.RegionType == BINARY_FILE_TYPE_FV:
+ #
+ # Get Fv from FvDict
+ #
+ self.FvAddress = int(BaseAddress, 16) + self.Offset
+ FvBaseAddress = '0x%X' % self.FvAddress
+ FvOffset = 0
+ for RegionData in self.RegionDataList:
+ FileName = None
+ if RegionData.endswith(".fv"):
+ RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)
+ if RegionData[1] != ':' :
+ RegionData = mws.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
+ if not os.path.exists(RegionData):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
+
+ FileName = RegionData
+ elif RegionData.upper() + 'fv' in ImageBinDict:
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger(' Region Name = FV')
+ FileName = ImageBinDict[RegionData.upper() + 'fv']
+ else:
+ #
+ # Generate FvImage.
+ #
+ FvObj = None
+ if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
+ FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[RegionData.upper()]
+
+ if FvObj is not None :
+ if not Flag:
+ GenFdsGlobalVariable.InfLogger(' Region Name = FV')
+ #
+ # Call GenFv tool
+ #
+ self.BlockInfoOfRegion(BlockSizeList, FvObj)
+ self.FvAddress = self.FvAddress + FvOffset
+ FvAlignValue = GenFdsGlobalVariable.GetAlignment(FvObj.FvAlignment)
+ if self.FvAddress % FvAlignValue != 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
+ FvBuffer = BytesIO()
+ FvBaseAddress = '0x%X' % self.FvAddress
+ BlockSize = None
+ BlockNum = None
+ FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, Flag=Flag)
+ if Flag:
+ continue
+
+ FvBufferLen = len(FvBuffer.getvalue())
+ if FvBufferLen > Size:
+ FvBuffer.close()
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Size of FV (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
+ #
+ # Put the generated image into FD buffer.
+ #
+ Buffer.write(FvBuffer.getvalue())
+ FvBuffer.close()
+ FvOffset = FvOffset + FvBufferLen
+ Size = Size - FvBufferLen
+ continue
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (RegionData))
+ #
+ # Add the exist Fv image into FD buffer
+ #
+ if not Flag:
+ if FileName is not None:
+ FileLength = os.stat(FileName)[ST_SIZE]
+ if FileLength > Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Size of FV File (%s) is larger than Region Size 0x%X specified." \
+ % (RegionData, Size))
+ BinFile = open(FileName, 'rb')
+ Buffer.write(BinFile.read())
+ BinFile.close()
+ Size = Size - FileLength
+ #
+ # Pad the left buffer
+ #
+ if not Flag:
+ self.PadBuffer(Buffer, ErasePolarity, Size)
+
+ if self.RegionType == 'CAPSULE':
+ #
+ # Get Capsule from Capsule Dict
+ #
+ for RegionData in self.RegionDataList:
+ if RegionData.endswith(".cap"):
+ RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
+ GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)
+ if RegionData[1] != ':' :
+ RegionData = mws.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
+ if not os.path.exists(RegionData):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
+
+ FileName = RegionData
+ elif RegionData.upper() + 'cap' in ImageBinDict:
+ GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
+ FileName = ImageBinDict[RegionData.upper() + 'cap']
+ else:
+ #
+ # Generate Capsule image and Put it into FD buffer
+ #
+ CapsuleObj = None
+ if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict:
+ CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
+
+ if CapsuleObj is not None :
+ CapsuleObj.CapsuleName = RegionData.upper()
+ GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
+ #
+ # Call GenFv tool to generate Capsule Image
+ #
+ FileName = CapsuleObj.GenCapsule()
+ CapsuleObj.CapsuleName = None
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Capsule (%s) is NOT described in FDF file!" % (RegionData))
+
+ #
+ # Add the capsule image into FD buffer
+ #
+ FileLength = os.stat(FileName)[ST_SIZE]
+ if FileLength > Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \
+ % (FileLength, RegionData, Size))
+ BinFile = open(FileName, 'rb')
+ Buffer.write(BinFile.read())
+ BinFile.close()
+ Size = Size - FileLength
+ #
+ # Pad the left buffer
+ #
+ self.PadBuffer(Buffer, ErasePolarity, Size)
+
+ if self.RegionType in ('FILE', 'INF'):
+ for RegionData in self.RegionDataList:
+ if self.RegionType == 'INF':
+ RegionData.__InfParse__(None)
+ if len(RegionData.BinFileList) != 1:
+ EdkLogger.error('GenFds', GENFDS_ERROR, 'INF in FD region can only contain one binary: %s' % RegionData)
+ File = RegionData.BinFileList[0]
+ RegionData = RegionData.PatchEfiFile(File.Path, File.Type)
+ else:
+ RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
+ if RegionData[1] != ':' :
+ RegionData = mws.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
+ if not os.path.exists(RegionData):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
+ #
+ # Add the file image into FD buffer
+ #
+ FileLength = os.stat(RegionData)[ST_SIZE]
+ if FileLength > Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Size of File (%s) is larger than Region Size 0x%X specified." \
+ % (RegionData, Size))
+ GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)
+ BinFile = open(RegionData, 'rb')
+ Buffer.write(BinFile.read())
+ BinFile.close()
+ Size = Size - FileLength
+ #
+ # Pad the left buffer
+ #
+ self.PadBuffer(Buffer, ErasePolarity, Size)
+
+ if self.RegionType == 'DATA' :
+ GenFdsGlobalVariable.InfLogger(' Region Name = DATA')
+ DataSize = 0
+ for RegionData in self.RegionDataList:
+ Data = RegionData.split(',')
+ DataSize = DataSize + len(Data)
+ if DataSize > Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Size of DATA is larger than Region Size")
+ else:
+ for item in Data :
+ Buffer.write(pack('B', int(item, 16)))
+ Size = Size - DataSize
+ #
+ # Pad the left buffer
+ #
+ self.PadBuffer(Buffer, ErasePolarity, Size)
+
+ if self.RegionType is None:
+ GenFdsGlobalVariable.InfLogger(' Region Name = None')
+ self.PadBuffer(Buffer, ErasePolarity, Size)
+
+ ## BlockInfoOfRegion()
+ #
+ # @param BlockSizeList List of block information
+ # @param FvObj The object for FV
+ #
+ def BlockInfoOfRegion(self, BlockSizeList, FvObj):
+ Start = 0
+ End = 0
+ RemainingSize = self.Size
+ ExpectedList = []
+ for (BlockSize, BlockNum, pcd) in BlockSizeList:
+ End = Start + BlockSize * BlockNum
+ # region not started yet
+ if self.Offset >= End:
+ Start = End
+ continue
+ # region located in current blocks
+ else:
+ # region ended within current blocks
+ if self.Offset + self.Size <= End:
+ ExpectedList.append((BlockSize, (RemainingSize + BlockSize - 1) // BlockSize))
+ break
+ # region not ended yet
+ else:
+ # region not started in middle of current blocks
+ if self.Offset <= Start:
+ UsedBlockNum = BlockNum
+ # region started in middle of current blocks
+ else:
+ UsedBlockNum = (End - self.Offset) // BlockSize
+ Start = End
+ ExpectedList.append((BlockSize, UsedBlockNum))
+ RemainingSize -= BlockSize * UsedBlockNum
+
+ if FvObj.BlockSizeList == []:
+ FvObj.BlockSizeList = ExpectedList
+ else:
+ # first check whether FvObj.BlockSizeList items have only "BlockSize" or "NumBlocks",
+ # if so, use ExpectedList
+ for Item in FvObj.BlockSizeList:
+ if Item[0] is None or Item[1] is None:
+ FvObj.BlockSizeList = ExpectedList
+ break
+ # make sure region size is no smaller than the summed block size in FV
+ Sum = 0
+ for Item in FvObj.BlockSizeList:
+ Sum += Item[0] * Item[1]
+ if self.Size < Sum:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Total Size of FV %s 0x%x is larger than Region Size 0x%x "
+ % (FvObj.UiFvName, Sum, self.Size))
+ # check whether the BlockStatements in FV section is appropriate
+ ExpectedListData = ''
+ for Item in ExpectedList:
+ ExpectedListData += "BlockSize = 0x%x\n\tNumBlocks = 0x%x\n\t" % Item
+ Index = 0
+ for Item in FvObj.BlockSizeList:
+ if Item[0] != ExpectedList[Index][0]:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not aligned with the FD's; suggested FV BlockStatements:"
+ % FvObj.UiFvName, ExtraData=ExpectedListData)
+ elif Item[1] != ExpectedList[Index][1]:
+ if (Item[1] < ExpectedList[Index][1]) and (Index == len(FvObj.BlockSizeList) - 1):
+ break
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "BlockStatements of FV %s are not aligned with the FD's; suggested FV BlockStatements:"
+ % FvObj.UiFvName, ExtraData=ExpectedListData)
+ else:
+ Index += 1
+
+
+
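A self-contained sketch of the erase-polarity rule PadBuffer implements: unprogrammed flash reads back 0xFF when the erase polarity is '1', 0x00 otherwise.

    from io import BytesIO
    from struct import pack

    def pad_buffer(buffer, erase_polarity, size):
        # Pad with the flash erase value so unused space reads as erased.
        if size > 0:
            pad_byte = pack('B', 0xFF) if erase_polarity == '1' else pack('B', 0)
            buffer.write(pad_byte * size)

    buf = BytesIO()
    buf.write(b'\xAA')
    pad_buffer(buf, '1', 3)
    assert buf.getvalue() == b'\xaa\xff\xff\xff'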
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Rule.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Rule.py
new file mode 100755
index 00000000..b14114fc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Rule.py
@@ -0,0 +1,23 @@
+## @file
+# Rule object for generating FFS
+#
+# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from CommonDataClass.FdfClass import RuleClassObject
+
+## Rule base class
+#
+#
+class Rule(RuleClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ RuleClassObject.__init__(self)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleComplexFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleComplexFile.py
new file mode 100755
index 00000000..35af9bc1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleComplexFile.py
@@ -0,0 +1,25 @@
+## @file
+# Complex Rule object for generating FFS
+#
+# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Rule
+from CommonDataClass.FdfClass import RuleComplexFileClassObject
+
+## complex rule
+#
+#
+class RuleComplexFile(RuleComplexFileClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ RuleComplexFileClassObject.__init__(self)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleSimpleFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleSimpleFile.py
new file mode 100755
index 00000000..4f4d75f0
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/RuleSimpleFile.py
@@ -0,0 +1,25 @@
+## @file
+# Simple Rule object for generating FFS
+#
+# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Rule
+from CommonDataClass.FdfClass import RuleSimpleFileClassObject
+
+## simple rule
+#
+#
+class RuleSimpleFile (RuleSimpleFileClassObject) :
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ RuleSimpleFileClassObject.__init__(self)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Section.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Section.py
new file mode 100755
index 00000000..a338cffe
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/Section.py
@@ -0,0 +1,153 @@
+## @file
+# section base class
+#
+# Copyright (c) 2007-2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from CommonDataClass.FdfClass import SectionClassObject
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+import Common.LongFilePathOs as os, glob
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.DataType import *
+
+## section base class
+#
+#
+class Section (SectionClassObject):
+ SectionType = {
+ 'RAW' : 'EFI_SECTION_RAW',
+ 'FREEFORM' : 'EFI_SECTION_FREEFORM_SUBTYPE_GUID',
+ BINARY_FILE_TYPE_PE32 : 'EFI_SECTION_PE32',
+ BINARY_FILE_TYPE_PIC : 'EFI_SECTION_PIC',
+ BINARY_FILE_TYPE_TE : 'EFI_SECTION_TE',
+ 'FV_IMAGE' : 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE',
+ BINARY_FILE_TYPE_DXE_DEPEX : 'EFI_SECTION_DXE_DEPEX',
+ BINARY_FILE_TYPE_PEI_DEPEX : 'EFI_SECTION_PEI_DEPEX',
+ 'GUIDED' : 'EFI_SECTION_GUID_DEFINED',
+ 'COMPRESS' : 'EFI_SECTION_COMPRESSION',
+ BINARY_FILE_TYPE_UI : 'EFI_SECTION_USER_INTERFACE',
+ BINARY_FILE_TYPE_SMM_DEPEX : 'EFI_SECTION_SMM_DEPEX'
+ }
+
+ BinFileType = {
+ BINARY_FILE_TYPE_GUID : '.guid',
+ 'ACPI' : '.acpi',
+ 'ASL' : '.asl' ,
+ BINARY_FILE_TYPE_UEFI_APP : '.app',
+ BINARY_FILE_TYPE_LIB : '.lib',
+ BINARY_FILE_TYPE_PE32 : '.pe32',
+ BINARY_FILE_TYPE_PIC : '.pic',
+ BINARY_FILE_TYPE_PEI_DEPEX : '.depex',
+ 'SEC_PEI_DEPEX' : '.depex',
+ BINARY_FILE_TYPE_TE : '.te',
+ BINARY_FILE_TYPE_UNI_VER : '.ver',
+ BINARY_FILE_TYPE_VER : '.ver',
+ BINARY_FILE_TYPE_UNI_UI : '.ui',
+ BINARY_FILE_TYPE_UI : '.ui',
+ BINARY_FILE_TYPE_BIN : '.bin',
+ 'RAW' : '.raw',
+ 'COMPAT16' : '.comp16',
+ BINARY_FILE_TYPE_FV : '.fv'
+ }
+
+ SectFileType = {
+ 'SEC_GUID' : '.sec' ,
+ 'SEC_PE32' : '.sec' ,
+ 'SEC_PIC' : '.sec',
+ 'SEC_TE' : '.sec',
+ 'SEC_VER' : '.sec',
+ 'SEC_UI' : '.sec',
+ 'SEC_COMPAT16' : '.sec',
+ 'SEC_BIN' : '.sec'
+ }
+
+ ToolGuid = {
+ '0xa31280ad-0x481e-0x41b6-0x95e8-0x127f-0x4c984779' : 'TianoCompress',
+ '0xee4e5898-0x3914-0x4259-0x9d6e-0xdc7b-0xd79403cf' : 'LzmaCompress'
+ }
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ SectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # virtual function
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+ # @param Dict dictionary contains macro and its value
+ #
+ def GenSection(self, OutputPath, GuidName, SecNum, keyStringList, FfsInf = None, Dict = None):
+ pass
+
+ ## GetFileList() method
+ #
+ # Get the list of input files for section generation by file type and extension
+ #
+ # @param self The object pointer
+ # @param FfsInf FfsInfStatement object that contains file list
+ # @param FileType File type to get
+ # @param FileExtension File extension to get
+ # @param Dict dictionary contains macro and its value
+ # @retval tuple (File list, boolean)
+ #
+ def GetFileList(FfsInf, FileType, FileExtension, Dict = None, IsMakefile=False, SectionType=None):
+ IsSect = FileType in Section.SectFileType
+
+ if FileExtension is not None:
+ Suffix = FileExtension
+ elif IsSect :
+ Suffix = Section.SectionType.get(FileType)
+ else:
+ Suffix = Section.BinFileType.get(FileType)
+ if FfsInf is None:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'Inf File does not exist!')
+
+ FileList = []
+ if FileType is not None:
+ for File in FfsInf.BinFileList:
+ if File.Arch == TAB_ARCH_COMMON or FfsInf.CurrentArch == File.Arch:
+ if File.Type == FileType or (int(FfsInf.PiSpecVersion, 16) >= 0x0001000A \
+ and FileType == 'DXE_DPEX' and File.Type == BINARY_FILE_TYPE_SMM_DEPEX) \
+ or (FileType == BINARY_FILE_TYPE_TE and File.Type == BINARY_FILE_TYPE_PE32):
+ if TAB_STAR in FfsInf.TargetOverrideList or File.Target == TAB_STAR or File.Target in FfsInf.TargetOverrideList or FfsInf.TargetOverrideList == []:
+ FileList.append(FfsInf.PatchEfiFile(File.Path, File.Type))
+ else:
+ GenFdsGlobalVariable.InfLogger ("\nBuild Target \'%s\' of File %s is not in the Scope of %s specified by INF %s in FDF" %(File.Target, File.File, FfsInf.TargetOverrideList, FfsInf.InfFileName))
+ else:
+ GenFdsGlobalVariable.VerboseLogger ("\nFile Type \'%s\' of File %s in %s does not match file type \'%s\' from the Rule in FDF" %(File.Type, File.File, FfsInf.InfFileName, FileType))
+ else:
+ GenFdsGlobalVariable.InfLogger ("\nCurrent ARCH \'%s\' of File %s is not in the Support Arch Scope of %s specified by INF %s in FDF" %(FfsInf.CurrentArch, File.File, File.Arch, FfsInf.InfFileName))
+
+ elif FileType is None and SectionType == BINARY_FILE_TYPE_RAW:
+ for File in FfsInf.BinFileList:
+ if File.Ext == Suffix:
+ FileList.append(File.Path)
+
+ if (not IsMakefile and Suffix is not None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix is not None):
+ if not FileList:
+ SuffixMap = FfsInf.GetFinalTargetSuffixMap()
+ if Suffix in SuffixMap:
+ FileList.extend(SuffixMap[Suffix])
+
+ # Sort the file list alphabetically for a given section type so the ordering is deterministic.
+ if len (FileList) > 1:
+ FileList.sort()
+
+ return FileList, IsSect
+ GetFileList = staticmethod(GetFileList)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/UiSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/UiSection.py
new file mode 100755
index 00000000..a8aa94c0
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/UiSection.py
@@ -0,0 +1,74 @@
+## @file
+# process UI section generation
+#
+# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from . import Section
+from .Ffs import SectionSuffix
+import subprocess
+import Common.LongFilePathOs as os
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import UiSectionClassObject
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.DataType import *
+
+## generate UI section
+#
+#
+class UiSection (UiSectionClassObject):
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ UiSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # Generate UI section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+ # @param Dict dictionary contains macro and its value
+ # @retval tuple (Generated file name, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
+ #
+ # Prepare the parameter of GenSection
+ #
+ if FfsInf is not None:
+ self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
+ self.StringData = FfsInf.__ExtendMacro__(self.StringData)
+ self.FileName = FfsInf.__ExtendMacro__(self.FileName)
+
+ OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get(BINARY_FILE_TYPE_UI))
+
+ if self.StringData is not None :
+ NameString = self.StringData
+ elif self.FileName is not None:
+ if Dict is None:
+ Dict = {}
+ FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
+ FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
+ FileObj = open(FileNameStr, 'r')
+ NameString = FileObj.read()
+ FileObj.close()
+ else:
+ NameString = ''
+ GenFdsGlobalVariable.GenerateSection(OutputFile, None, 'EFI_SECTION_USER_INTERFACE', Ui=NameString, IsMakefile=IsMakefile)
+
+ OutputFileList = []
+ OutputFileList.append(OutputFile)
+ return OutputFileList, self.Alignment
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/VerSection.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/VerSection.py
new file mode 100755
index 00000000..37ba96b9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/VerSection.py
@@ -0,0 +1,76 @@
+## @file
+# process Version section generation
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from .Ffs import SectionSuffix
+import Common.LongFilePathOs as os
+from .GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import VerSectionClassObject
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.DataType import SUP_MODULE_SEC
+
+## generate version section
+#
+#
+class VerSection (VerSectionClassObject):
+
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ VerSectionClassObject.__init__(self)
+
+ ## GenSection() method
+ #
+ # Generate version section
+ #
+ # @param self The object pointer
+ # @param OutputPath Where to place output file
+ # @param ModuleName Which module this section belongs to
+ # @param SecNum Index of section
+ # @param KeyStringList Filter for inputs of section generation
+ # @param FfsInf FfsInfStatement object that contains this section data
+ # @param Dict dictionary contains macro and its value
+ # @retval tuple (Generated file name, section alignment)
+ #
+ def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
+ #
+ # Prepare the parameter of GenSection
+ #
+ if FfsInf:
+ self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
+ self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
+ self.StringData = FfsInf.__ExtendMacro__(self.StringData)
+ self.FileName = FfsInf.__ExtendMacro__(self.FileName)
+
+ OutputFile = os.path.join(OutputPath,
+ ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get('VERSION'))
+ OutputFile = os.path.normpath(OutputFile)
+
+ # Get String Data
+ StringData = ''
+ if self.StringData:
+ StringData = self.StringData
+ elif self.FileName:
+ if Dict is None:
+ Dict = {}
+ FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
+ FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
+ FileObj = open(FileNameStr, 'r')
+ StringData = FileObj.read()
+ StringData = '"' + StringData + '"'
+ FileObj.close()
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
+ Ver=StringData, BuildNumber=self.BuildNum, IsMakefile=IsMakefile)
+ OutputFileList = []
+ OutputFileList.append(OutputFile)
+ return OutputFileList, self.Alignment
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/__init__.py
new file mode 100644
index 00000000..017d2686
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'GenFds' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
new file mode 100755
index 00000000..7e18bff7
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
@@ -0,0 +1,228 @@
+## @file
+# Generate PCD table for 'Patchable In Module' type PCD with given .map file.
+# The Patch PCD table like:
+#
+# PCD Name Offset in binary
+# ======== ================
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#
+
+#====================================== External Libraries ========================================
+from __future__ import print_function
+import optparse
+import Common.LongFilePathOs as os
+import re
+import array
+
+from Common.BuildToolError import *
+import Common.EdkLogger as EdkLogger
+from Common.Misc import PeImageClass, startPatternGeneral, addressPatternGeneral, valuePatternGcc, pcdPatternGcc, secReGeneral
+from Common.BuildVersion import gBUILD_VERSION
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+# Version and Copyright
+__version_number__ = ("0.10" + " " + gBUILD_VERSION)
+__version__ = "%prog Version " + __version_number__
+__copyright__ = "Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved."
+
+#====================================== Internal Libraries ========================================
+
+#============================================== Code ===============================================
+symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.\-:\\\\\w\?@\$<>]+) +([\da-fA-F]+)', re.UNICODE)
+
+def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
+ """ Parse map file to get binary patch pcd information
+ @param mapfilepath Absolute path of the map file
+
+ @return a list whose elements hold (PcdName, Offset, SectionName)
+ """
+ lines = []
+ try:
+ f = open(mapfilepath, 'r')
+ lines = f.readlines()
+ f.close()
+ except:
+ return None
+
+ if len(lines) == 0: return None
+ firstline = lines[0].strip()
+ if re.match('^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', firstline):
+ return _parseForXcodeAndClang9(lines, efifilepath)
+ if (firstline.startswith("Archive member included ") and
+ firstline.endswith(" file (symbol)")):
+ return _parseForGCC(lines, efifilepath)
+ if firstline.startswith("# Path:"):
+ return _parseForXcodeAndClang9(lines, efifilepath)
+ return _parseGeneral(lines, efifilepath)
+
+def _parseForXcodeAndClang9(lines, efifilepath):
+ valuePattern = re.compile('^([\da-fA-FxX]+)([\s\S]*)([_]*_gPcd_BinaryPatch_([\w]+))')
+ status = 0
+ pcds = []
+ for line in lines:
+ line = line.strip()
+ if status == 0 and (re.match('^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', line) \
+ or line == "# Symbols:"):
+ status = 1
+ continue
+ if status == 1 and len(line) != 0:
+ if '_gPcd_BinaryPatch_' in line:
+ m = valuePattern.match(line)
+ if m is not None:
+ pcds.append((m.groups(0)[3], int(m.groups(0)[0], 16)))
+ return pcds
+
+def _parseForGCC(lines, efifilepath):
+ """ Parse map file generated by GCC linker """
+ dataPattern = re.compile('^.data._gPcd_BinaryPatch_([\w_\d]+)$')
+ status = 0
+ imageBase = -1
+ sections = []
+ bpcds = []
+ for index, line in enumerate(lines):
+ line = line.strip()
+ # state machine transition
+ if status == 0 and line == "Memory Configuration":
+ status = 1
+ continue
+ elif status == 1 and line == 'Linker script and memory map':
+ status = 2
+ continue
+ elif status ==2 and line == 'START GROUP':
+ status = 3
+ continue
+
+ # status handler
+ if status == 3:
+ m = valuePatternGcc.match(line)
+ if m is not None:
+ sections.append(m.groups(0))
+ if status == 3:
+ m = dataPattern.match(line)
+ if m is not None:
+ if lines[index + 1]:
+ PcdName = m.groups(0)[0]
+ m = pcdPatternGcc.match(lines[index + 1].strip())
+ if m is not None:
+ bpcds.append((PcdName, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
+
+ # get section information from efi file
+ efisecs = PeImageClass(efifilepath).SectionHeaderList
+ if efisecs is None or len(efisecs) == 0:
+ return None
+ # Compute the address redirection between map-file addresses and EFI image sections via .text.
+ redirection = 0
+ for efisec in efisecs:
+ for section in sections:
+ if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text':
+ redirection = int(section[1], 16) - efisec[1]
+ pcds = []
+ for pcd in bpcds:
+ for efisec in efisecs:
+ if pcd[1] >= efisec[1] and pcd[1] < efisec[1]+efisec[3]:
+ #assert efisec[0].strip() == pcd[3].strip() and efisec[1] + redirection == pcd[2], "There are some differences between map file and efi file"
+ pcds.append([pcd[0], efisec[2] + pcd[1] - efisec[1] - redirection, efisec[0]])
+ return pcds
+
+def _parseGeneral(lines, efifilepath):
+ """ For MSFT, ICC, EBC
+ @param lines line array for map file
+
+ @return a list whose elements hold (PcdName, Offset, SectionName)
+ """
+ status = 0 #0 - beginning of file; 1 - PE section definition; 2 - symbol table
+ secs = [] # key = section name
+ bPcds = []
+ symPattern = re.compile('^[_]+gPcd_BinaryPatch_([\w]+)')
+
+ for line in lines:
+ line = line.strip()
+ if startPatternGeneral.match(line):
+ status = 1
+ continue
+ if addressPatternGeneral.match(line):
+ status = 2
+ continue
+ if line.startswith("entry point at"):
+ status = 3
+ continue
+ if status == 1 and len(line) != 0:
+ m = secReGeneral.match(line)
+ assert m is not None, "Failed to parse the section in map file, line: %s" % line
+ sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
+ secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
+ if status == 2 and len(line) != 0:
+ m = symRe.match(line)
+ assert m is not None, "Failed to parse the symbol in map file, line: %s" % line
+ sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
+ sec_no = int(sec_no, 16)
+ sym_offset = int(sym_offset, 16)
+ vir_addr = int(vir_addr, 16)
+ m2 = symPattern.match(sym_name)
+ if m2 is not None:
+ # found a binary patch PCD entry in the map file
+ for sec in secs:
+ if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
+ bPcds.append([m2.groups(0)[0], sec[3], sym_offset, vir_addr, sec_no])
+
+ if len(bPcds) == 0: return None
+
+ # get section information from efi file
+ efisecs = PeImageClass(efifilepath).SectionHeaderList
+ if efisecs is None or len(efisecs) == 0:
+ return None
+
+ pcds = []
+ for pcd in bPcds:
+ index = 0
+ for efisec in efisecs:
+ index = index + 1
+ if pcd[1].strip() == efisec[0].strip():
+ pcds.append([pcd[0], efisec[2] + pcd[2], efisec[0]])
+ elif pcd[4] == index:
+ pcds.append([pcd[0], efisec[2] + pcd[2], efisec[0]])
+ return pcds
+
+def generatePcdTable(pcdList, pcdpath):
+ """ Write the binary patch PCD table to a text file. """
+ try:
+ f = open(pcdpath, 'w')
+ except:
+ return
+
+ f.write('%-30s %-10s %-6s\r\n' % ('PCD Name', 'Offset', 'Section Name'))
+
+ for pcditem in pcdList:
+ f.write('%-30s 0x%-08X %-6s\r\n' % (pcditem[0], pcditem[1], pcditem[2]))
+ f.close()
+
+ #print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
+
+if __name__ == '__main__':
+ UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>"
+ AdditionalNotes = "\nThe PCD table is generated in a file named with the .BinaryPcdTable.txt suffix"
+ parser = optparse.OptionParser(description=__copyright__, version=__version__, usage=UsageString)
+ parser.add_option('-m', '--mapfile', action='store', dest='mapfile',
+ help='Absolute path of module map file.')
+ parser.add_option('-e', '--efifile', action='store', dest='efifile',
+ help='Absolute path of EFI binary file.')
+ parser.add_option('-o', '--outputfile', action='store', dest='outfile',
+ help='Absolute path of the output file to store the generated patchable PCD table.')
+
+ (options, args) = parser.parse_args()
+
+ if options.mapfile is None or options.efifile is None:
+ print(parser.get_usage())
+ elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):
+ pcdList = parsePcdInfoFromMapFile(options.mapfile, options.efifile)
+ if pcdList is not None:
+ if options.outfile is not None:
+ generatePcdTable(pcdList, options.outfile)
+ else:
+ generatePcdTable(pcdList, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))
+ else:
+ print('Failed to generate the Patch PCD Table from the map file and efi file')
+ else:
+ print('Failed to generate the Patch PCD Table: map file or efi file not found!')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/__init__.py
new file mode 100644
index 00000000..b09e0e1f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenPatchPcdTable/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'GenPatchPcdTable' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Makefile b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Makefile
new file mode 100644
index 00000000..7b4f62d2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Makefile
@@ -0,0 +1,13 @@
+## @file
+# Windows makefile for Python tools build.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+all:
+
+clean:
+cleanall:
+ @del /f /q $(BASE_TOOLS_PATH)\Source\Python\*.pyc
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
new file mode 100755
index 00000000..fb55175b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
@@ -0,0 +1,280 @@
+## @file
+# Patch value into the binary file.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+import Common.LongFilePathOs as os
+from Common.LongFilePathSupport import OpenLongFilePath as open
+import sys
+
+from optparse import OptionParser
+from optparse import make_option
+from Common.BuildToolError import *
+import Common.EdkLogger as EdkLogger
+from Common.BuildVersion import gBUILD_VERSION
+import array
+from Common.DataType import *
+
+# Version and Copyright
+__version_number__ = ("0.10" + " " + gBUILD_VERSION)
+__version__ = "%prog Version " + __version_number__
+__copyright__ = "Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved."
+
+## PatchBinaryFile method
+#
+# This method patches the data into the binary file.
+#
+# @param FileName File path of the binary file
+# @param ValueOffset Offset value
+# @param TypeName DataType Name
+# @param Value Value String
+# @param MaxSize MaxSize value
+#
+# @retval 0 File is updated successfully.
+# @retval not 0 File update failed.
+#
+def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
+ #
+ # Length of Binary File
+ #
+ FileHandle = open(FileName, 'rb')
+ FileHandle.seek (0, 2)
+ FileLength = FileHandle.tell()
+ FileHandle.close()
+ #
+ # Unify string to upper string
+ #
+ TypeName = TypeName.upper()
+ #
+ # Get PCD value data length
+ #
+ ValueLength = 0
+ if TypeName == 'BOOLEAN':
+ ValueLength = 1
+ elif TypeName == TAB_UINT8:
+ ValueLength = 1
+ elif TypeName == TAB_UINT16:
+ ValueLength = 2
+ elif TypeName == TAB_UINT32:
+ ValueLength = 4
+ elif TypeName == TAB_UINT64:
+ ValueLength = 8
+ elif TypeName == TAB_VOID:
+ if MaxSize == 0:
+ return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."
+ ValueLength = int(MaxSize)
+ else:
+ return PARAMETER_INVALID, "PCD type %s is not valid." % (TypeName)
+ #
+ # Check PcdValue is in the input binary file.
+ #
+ if ValueOffset + ValueLength > FileLength:
+ return PARAMETER_INVALID, "PcdOffset + PcdMaxSize(DataType) is larger than the input file size."
+ #
+ # Read binary file into array
+ #
+ FileHandle = open(FileName, 'rb')
+ ByteArray = array.array('B')
+ ByteArray.fromfile(FileHandle, FileLength)
+ FileHandle.close()
+ OrigByteList = ByteArray.tolist()
+ ByteList = ByteArray.tolist()
+ #
+ # Clear the data in file
+ #
+ for Index in range(ValueLength):
+ ByteList[ValueOffset + Index] = 0
+ #
+ # Patch value into offset
+ #
+ SavedStr = ValueString
+ ValueString = ValueString.upper()
+ ValueNumber = 0
+ if TypeName == 'BOOLEAN':
+ #
+ # Get PCD value for BOOLEAN data type
+ #
+ try:
+ if ValueString == 'TRUE':
+ ValueNumber = 1
+ elif ValueString == 'FALSE':
+ ValueNumber = 0
+ else:
+ ValueNumber = int (ValueString, 0)
+ if ValueNumber != 0:
+ ValueNumber = 1
+ except:
+ return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
+ #
+ # Set PCD value into binary data
+ #
+ ByteList[ValueOffset] = ValueNumber
+ elif TypeName in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ #
+ # Get PCD value for UINT* data type
+ #
+ try:
+ ValueNumber = int (ValueString, 0)
+ except:
+ return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
+ #
+ # Set PCD value into binary data
+ #
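+ # (stored least-significant byte first, i.e. little-endian)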
+ for Index in range(ValueLength):
+ ByteList[ValueOffset + Index] = ValueNumber % 0x100
+ ValueNumber = ValueNumber // 0x100
+ elif TypeName == TAB_VOID:
+ ValueString = SavedStr
+ if ValueString.startswith('L"'):
+ #
+ # Patch Unicode String
+ #
+ Index = 0
+ for ByteString in ValueString[2:-1]:
+ #
+ # Reserve a trailing zero as the Unicode string terminator
+ #
+ if Index + 2 >= ValueLength:
+ break
+ #
+ # Set the string value one character at a time (low byte only)
+ #
+ ByteList[ValueOffset + Index] = ord(ByteString)
+ Index = Index + 2
+ elif ValueString.startswith("{") and ValueString.endswith("}"):
+ #
+ # Patch {0x1, 0x2, ...} byte by byte
+ #
+ ValueList = ValueString[1 : len(ValueString) - 1].split(',')
+ Index = 0
+ try:
+ for ByteString in ValueList:
+ ByteString = ByteString.strip()
+ if ByteString.upper().startswith('0X'):
+ ByteValue = int(ByteString, 16)
+ else:
+ ByteValue = int(ByteString)
+ ByteList[ValueOffset + Index] = ByteValue % 0x100
+ Index = Index + 1
+ if Index >= ValueLength:
+ break
+ except:
+ return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
+ else:
+ #
+ # Patch ascii string
+ #
+ Index = 0
+ for ByteString in ValueString[1:-1]:
+ #
+ # Reserve a trailing zero as the ASCII string terminator
+ #
+ if Index + 1 >= ValueLength:
+ break
+ #
+ # Set string value one by one
+ #
+ ByteList[ValueOffset + Index] = ord(ByteString)
+ Index = Index + 1
+ #
+ # Update new data into input file.
+ #
+ if ByteList != OrigByteList:
+ ByteArray = array.array('B')
+ ByteArray.fromlist(ByteList)
+ FileHandle = open(FileName, 'wb')
+ ByteArray.tofile(FileHandle)
+ FileHandle.close()
+ return 0, "Patch Value into File %s successfully." % (FileName)
+
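+# A minimal usage sketch (hypothetical file name and offset, for illustration only):
+#   rc, msg = PatchBinaryFile('Module.efi', 0x120, 'UINT32', '0xDEADBEEF')
+#   rc == 0 means the value was patched (the file is rewritten only if bytes changed)
+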
+## Parse command line options
+#
+# Uses the standard Python optparse module to parse the command line options of this tool.
+#
+# @retval Options An optparse.Values object containing the parsed options
+# @retval InputFile Path of the file to be patched
+#
+def Options():
+ OptionList = [
+ make_option("-f", "--offset", dest="PcdOffset", action="store", type="int",
+ help="Start offset to the image is used to store PCD value."),
+ make_option("-u", "--value", dest="PcdValue", action="store",
+ help="PCD value will be updated into the image."),
+ make_option("-t", "--type", dest="PcdTypeName", action="store",
+ help="The name of PCD data type may be one of VOID*,BOOLEAN, UINT8, UINT16, UINT32, UINT64."),
+ make_option("-s", "--maxsize", dest="PcdMaxSize", action="store", type="int",
+ help="Max size of data buffer is taken by PCD value.It must be set when PCD type is VOID*."),
+ make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
+ help="Run verbosely"),
+ make_option("-d", "--debug", dest="LogLevel", type="int",
+ help="Run with debug information"),
+ make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
+ help="Run quietly"),
+ make_option("-?", action="help", help="show this help message and exit"),
+ ]
+
+ # use clearer usage to override default usage message
+ UsageString = "%prog -f Offset -u Value -t Type [-s MaxSize] <input_file>"
+
+ Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
+ Parser.set_defaults(LogLevel=EdkLogger.INFO)
+
+ Options, Args = Parser.parse_args()
+
+ # error check
+ if len(Args) == 0:
+ EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData=Parser.get_usage())
+
+ InputFile = Args[len(Args) - 1]
+ return Options, InputFile
+
+## Entrance method
+#
+# This method dispatches to specific methods per the command line options.
+# If no error is found, it returns zero so the caller of this tool can know
+# whether it executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ try:
+ #
+ # Check input parameter
+ #
+ EdkLogger.Initialize()
+ CommandOptions, InputFile = Options()
+ if CommandOptions.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.SetLevel(CommandOptions.LogLevel + 1)
+ else:
+ EdkLogger.SetLevel(CommandOptions.LogLevel)
+ if not os.path.exists (InputFile):
+ EdkLogger.error("PatchPcdValue", FILE_NOT_FOUND, ExtraData=InputFile)
+ return 1
+ if CommandOptions.PcdOffset is None or CommandOptions.PcdValue is None or CommandOptions.PcdTypeName is None:
+ EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
+ return 1
+ if CommandOptions.PcdTypeName.upper() not in TAB_PCD_NUMERIC_TYPES_VOID:
+ EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
+ return 1
+ if CommandOptions.PcdTypeName.upper() == TAB_VOID and CommandOptions.PcdMaxSize is None:
+ EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")
+ return 1
+ #
+ # Patch value into binary image.
+ #
+ ReturnValue, ErrorInfo = PatchBinaryFile (InputFile, CommandOptions.PcdOffset, CommandOptions.PcdTypeName, CommandOptions.PcdValue, CommandOptions.PcdMaxSize)
+ if ReturnValue != 0:
+ EdkLogger.error("PatchPcdValue", ReturnValue, ExtraData=ErrorInfo)
+ return 1
+ return 0
+ except:
+ return 1
+
+if __name__ == '__main__':
+ r = Main()
+ sys.exit(r)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/__init__.py
new file mode 100644
index 00000000..a7d43d16
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/PatchPcdValue/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'PatchPcdValue' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py
new file mode 100755
index 00000000..04218427
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py
@@ -0,0 +1,280 @@
+## @file
+# This tool adds EFI_FIRMWARE_IMAGE_AUTHENTICATION for a binary.
+#
+# This tool only supports CertType EFI_CERT_TYPE_PKCS7_GUID
+# {0x4aafd29d, 0x68df, 0x49ee, {0x8a, 0xa9, 0x34, 0x7d, 0x37, 0x56, 0x65, 0xa7}}
+#
+# This tool has been tested with OpenSSL.
+#
+# Copyright (c) 2016 - 2017, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Pkcs7Sign
+'''
+from __future__ import print_function
+
+import os
+import sys
+import argparse
+import subprocess
+import uuid
+import struct
+import collections
+from Common.BuildVersion import gBUILD_VERSION
+
+#
+# Globals for help information
+#
+__prog__ = 'Pkcs7Sign'
+__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
+__copyright__ = 'Copyright (c) 2016, Intel Corporation. All rights reserved.'
+__usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
+
+#
+# GUID for PKCS7 from UEFI Specification
+#
+WIN_CERT_REVISION = 0x0200
+WIN_CERT_TYPE_EFI_GUID = 0x0EF1
+EFI_CERT_TYPE_PKCS7_GUID = uuid.UUID('{4aafd29d-68df-49ee-8aa9-347d375665a7}')
+
+#
+# typedef struct _WIN_CERTIFICATE {
+# UINT32 dwLength;
+# UINT16 wRevision;
+# UINT16 wCertificateType;
+# //UINT8 bCertificate[ANYSIZE_ARRAY];
+# } WIN_CERTIFICATE;
+#
+# typedef struct _WIN_CERTIFICATE_UEFI_GUID {
+# WIN_CERTIFICATE Hdr;
+# EFI_GUID CertType;
+# //UINT8 CertData[ANYSIZE_ARRAY];
+# } WIN_CERTIFICATE_UEFI_GUID;
+#
+# typedef struct {
+# UINT64 MonotonicCount;
+# WIN_CERTIFICATE_UEFI_GUID AuthInfo;
+# } EFI_FIRMWARE_IMAGE_AUTHENTICATION;
+#
+
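+# A sketch (assumption: CertData immediately follows CertType) of packing the
+# EFI_FIRMWARE_IMAGE_AUTHENTICATION header with the struct module; dwLength
+# covers the WIN_CERTIFICATE_UEFI_GUID header (4 + 2 + 2 + 16 = 24 bytes)
+# plus the PKCS7 CertData:
+#   Header = struct.pack('<QIHH16s', MonotonicCount, 24 + len(CertData),
+#                        WIN_CERT_REVISION, WIN_CERT_TYPE_EFI_GUID,
+#                        EFI_CERT_TYPE_PKCS7_GUID.bytes_le)
+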
+#
+# Filename of test signing private cert that is stored in same directory as this tool
+#
+TEST_SIGNER_PRIVATE_CERT_FILENAME = 'TestCert.pem'
+TEST_OTHER_PUBLIC_CERT_FILENAME = 'TestSub.pub.pem'
+TEST_TRUSTED_PUBLIC_CERT_FILENAME = 'TestRoot.pub.pem'
+
+if __name__ == '__main__':
+ #
+ # Create command line argument parser object
+ #
+ parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
+ group.add_argument("-d", action="store_true", dest='Decode', help='decode file')
+ group.add_argument("--version", action='version', version=__version__)
+ parser.add_argument("-o", "--output", dest='OutputFile', type=str, metavar='filename', help="specify the output filename", required=True)
+ parser.add_argument("--signer-private-cert", dest='SignerPrivateCertFile', type=argparse.FileType('rb'), help="specify the signer private cert filename. If not specified, a test signer private cert is used.")
+ parser.add_argument("--other-public-cert", dest='OtherPublicCertFile', type=argparse.FileType('rb'), help="specify the other public cert filename. If not specified, a test other public cert is used.")
+ parser.add_argument("--trusted-public-cert", dest='TrustedPublicCertFile', type=argparse.FileType('rb'), help="specify the trusted public cert filename. If not specified, a test trusted public cert is used.")
+ parser.add_argument("--monotonic-count", dest='MonotonicCountStr', type=str, help="specify the MonotonicCount in FMP capsule. If not specified, 0 is used.")
+ parser.add_argument("--signature-size", dest='SignatureSizeStr', type=str, help="specify the signature size for decode process.")
+ parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
+ parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
+ parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
+ parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename")
+
+ #
+ # Parse command line arguments
+ #
+ args = parser.parse_args()
+
+ #
+ # Generate the file path to the OpenSSL command
+ #
+ OpenSslCommand = 'openssl'
+ try:
+ OpenSslPath = os.environ['OPENSSL_PATH']
+ OpenSslCommand = os.path.join(OpenSslPath, OpenSslCommand)
+ if ' ' in OpenSslCommand:
+ OpenSslCommand = '"' + OpenSslCommand + '"'
+ except:
+ pass
+
+ #
+ # Verify that the OpenSSL command is available
+ #
+ try:
+ Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ except:
+ print('ERROR: OpenSSL command not available. Please verify PATH or set OPENSSL_PATH')
+ sys.exit(1)
+
+ Version = Process.communicate()
+ if Process.returncode != 0:
+ print('ERROR: OpenSSL command not available. Please verify PATH or set OPENSSL_PATH')
+ sys.exit(Process.returncode)
+ print(Version[0].decode())
+
+ #
+ # Read input file into a buffer and save input filename
+ #
+ args.InputFileName = args.InputFile.name
+ args.InputFileBuffer = args.InputFile.read()
+ args.InputFile.close()
+
+ #
+ # Save output filename and check if path exists
+ #
+ OutputDir = os.path.dirname(args.OutputFile)
+ if OutputDir and not os.path.exists(OutputDir):
+ print('ERROR: The output path does not exist: %s' % OutputDir)
+ sys.exit(1)
+ args.OutputFileName = args.OutputFile
+
+ try:
+ if args.MonotonicCountStr.upper().startswith('0X'):
+ args.MonotonicCountValue = int(args.MonotonicCountStr, 16)
+ else:
+ args.MonotonicCountValue = int(args.MonotonicCountStr)
+ except:
+ args.MonotonicCountValue = int(0)
+
+ if args.Encode:
+ #
+ # Save signer private cert filename and close private cert file
+ #
+ try:
+ args.SignerPrivateCertFileName = args.SignerPrivateCertFile.name
+ args.SignerPrivateCertFile.close()
+ except:
+ try:
+ #
+ # Get path to currently executing script or executable
+ #
+ if hasattr(sys, 'frozen'):
+ Pkcs7ToolPath = sys.executable
+ else:
+ Pkcs7ToolPath = sys.argv[0]
+ if Pkcs7ToolPath.startswith('"'):
+ Pkcs7ToolPath = Pkcs7ToolPath[1:]
+ if Pkcs7ToolPath.endswith('"'):
+ Pkcs7ToolPath = Pkcs7ToolPath[:-1]
+ args.SignerPrivateCertFileName = os.path.join(os.path.dirname(os.path.realpath(Pkcs7ToolPath)), TEST_SIGNER_PRIVATE_CERT_FILENAME)
+ args.SignerPrivateCertFile = open(args.SignerPrivateCertFileName, 'rb')
+ args.SignerPrivateCertFile.close()
+ except:
+ print('ERROR: test signer private cert file %s missing' % (args.SignerPrivateCertFileName))
+ sys.exit(1)
+
+ #
+ # Save other public cert filename and close public cert file
+ #
+ try:
+ args.OtherPublicCertFileName = args.OtherPublicCertFile.name
+ args.OtherPublicCertFile.close()
+ except:
+ try:
+ #
+ # Get path to currently executing script or executable
+ #
+ if hasattr(sys, 'frozen'):
+ Pkcs7ToolPath = sys.executable
+ else:
+ Pkcs7ToolPath = sys.argv[0]
+ if Pkcs7ToolPath.startswith('"'):
+ Pkcs7ToolPath = Pkcs7ToolPath[1:]
+ if Pkcs7ToolPath.endswith('"'):
+ Pkcs7ToolPath = Pkcs7ToolPath[:-1]
+ args.OtherPublicCertFileName = os.path.join(os.path.dirname(os.path.realpath(Pkcs7ToolPath)), TEST_OTHER_PUBLIC_CERT_FILENAME)
+ args.OtherPublicCertFile = open(args.OtherPublicCertFileName, 'rb')
+ args.OtherPublicCertFile.close()
+ except:
+ print('ERROR: test other public cert file %s missing' % (args.OtherPublicCertFileName))
+ sys.exit(1)
+
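+ # Append the 64-bit MonotonicCount after the payload so the signature covers both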
+ format = "%dsQ" % len(args.InputFileBuffer)
+ FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
+
+ #
+ # Sign the input file using the specified private key and capture signature from STDOUT
+ #
+ Process = subprocess.Popen('%s smime -sign -binary -signer "%s" -outform DER -md sha256 -certfile "%s"' % (OpenSslCommand, args.SignerPrivateCertFileName, args.OtherPublicCertFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ Signature = Process.communicate(input=FullInputFileBuffer)[0]
+ if Process.returncode != 0:
+ sys.exit(Process.returncode)
+
+ #
+ # Write output file that contains Signature, and Input data
+ #
+ args.OutputFile = open(args.OutputFileName, 'wb')
+ args.OutputFile.write(Signature)
+ args.OutputFile.write(args.InputFileBuffer)
+ args.OutputFile.close()
+
+ if args.Decode:
+ #
+ # Save trusted public cert filename and close public cert file
+ #
+ try:
+ args.TrustedPublicCertFileName = args.TrustedPublicCertFile.name
+ args.TrustedPublicCertFile.close()
+ except:
+ try:
+ #
+ # Get path to currently executing script or executable
+ #
+ if hasattr(sys, 'frozen'):
+ Pkcs7ToolPath = sys.executable
+ else:
+ Pkcs7ToolPath = sys.argv[0]
+ if Pkcs7ToolPath.startswith('"'):
+ Pkcs7ToolPath = Pkcs7ToolPath[1:]
+ if Pkcs7ToolPath.endswith('"'):
+ Pkcs7ToolPath = Pkcs7ToolPath[:-1]
+ args.TrustedPublicCertFileName = os.path.join(os.path.dirname(os.path.realpath(Pkcs7ToolPath)), TEST_TRUSTED_PUBLIC_CERT_FILENAME)
+ args.TrustedPublicCertFile = open(args.TrustedPublicCertFileName, 'rb')
+ args.TrustedPublicCertFile.close()
+ except:
+ print('ERROR: test trusted public cert file %s missing' % (args.TrustedPublicCertFileName))
+ sys.exit(1)
+
+ if not args.SignatureSizeStr:
+ print("ERROR: please use the option --signature-size to specify the size of the signature data!")
+ sys.exit(1)
+ else:
+ if args.SignatureSizeStr.upper().startswith('0X'):
+ SignatureSize = int(args.SignatureSizeStr, 16)
+ else:
+ SignatureSize = int(args.SignatureSizeStr)
+ if SignatureSize < 0:
+ print("ERROR: The value of option --signature-size cannot be negative!")
+ sys.exit(1)
+ elif SignatureSize > len(args.InputFileBuffer):
+ print("ERROR: The value of option --signature-size exceeds the size of the input file!")
+ sys.exit(1)
+
+ args.SignatureBuffer = args.InputFileBuffer[0:SignatureSize]
+ args.InputFileBuffer = args.InputFileBuffer[SignatureSize:]
+
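+ # Rebuild payload||MonotonicCount so verification covers the same bytes that were signed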
+ format = "%dsQ" % len(args.InputFileBuffer)
+ FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
+
+ #
+ # Write the payload (with MonotonicCount appended) for OpenSSL to verify against
+ #
+ open(args.OutputFileName, 'wb').write(FullInputFileBuffer)
+
+ #
+ # Verify signature
+ #
+ Process = subprocess.Popen('%s smime -verify -inform DER -content %s -CAfile %s' % (OpenSslCommand, args.OutputFileName, args.TrustedPublicCertFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ Process.communicate(input=args.SignatureBuffer)[0]
+ if Process.returncode != 0:
+ print('ERROR: Verification failed')
+ os.remove (args.OutputFileName)
+ sys.exit(Process.returncode)
+
+ open(args.OutputFileName, 'wb').write(args.InputFileBuffer)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Readme.md b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Readme.md
new file mode 100644
index 00000000..c5bae61a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/Readme.md
@@ -0,0 +1,158 @@
+# Step-by-step guide to generating a sample self-signed X.509 certificate chain and signing data with a PKCS7 structure
+
+This readme demonstrates how to generate a 3-layer X.509 certificate chain (RootCA -> IntermediateCA -> SigningCert) with OpenSSL commands. The user MUST set a UNIQUE Subject Name ("Common Name") on each of these three certificates.
+
+## How to generate a self-signed X.509 certificate chain via OPENSSL
+* Set OPENSSL environment.
+
+NOTE: The steps below are required on Windows. Linux systems typically have the OpenSSL environment configured already.
+
+ set OPENSSL_HOME=c:\home\openssl\openssl-[version]
+ set OPENSSL_CONF=%OPENSSL_HOME%\apps\openssl.cnf
+
+When a user uses OpenSSL (the req or ca command) to generate the certificates, OpenSSL uses the openssl.cnf file as its configuration data (use "-config path/to/openssl.cnf" to point at a specific config file).
+
+The user needs to check the openssl.cnf file for the CA path setting, e.g. verify that the path configured in the [ CA_default ] section exists.
+
+ [ CA_default ]
+ dir = ./demoCA # Where everything is kept
+
+You may need the following steps for initialization:
+
+ rd ./demoCA /S/Q
+ mkdir ./demoCA
+ echo.>./demoCA/index.txt
+ echo 01 > ./demoCA/serial
+ mkdir ./demoCA/newcerts
+
+OpenSSL will apply the options from the specified sections in openssl.cnf when creating certificates or certificate signing requests. Make sure your configuration in openssl.cnf is correct and reasonable for the certificate constraints.
+The following sample sections were used when generating the test certificates in this readme.
+ ...
+ [ req ]
+ default_bits = 2048
+ default_keyfile = privkey.pem
+ distinguished_name = req_distinguished_name
+ attributes = req_attributes
+ x509_extensions = v3_ca # The extensions to add to the self signed cert
+ ...
+ [ v3_ca ]
+ # Extensions for a typical Root CA.
+ subjectKeyIdentifier=hash
+ authorityKeyIdentifier=keyid:always,issuer
+ basicConstraints = critical,CA:true
+ keyUsage = critical, digitalSignature, cRLSign, keyCertSign
+ ...
+ [ v3_intermediate_ca ]
+ # Extensions for a typical intermediate CA.
+ subjectKeyIdentifier = hash
+ authorityKeyIdentifier = keyid:always,issuer
+ basicConstraints = critical, CA:true
+ keyUsage = critical, digitalSignature, cRLSign, keyCertSign
+ ...
+ [ usr_cert ]
+ # Extensions for user end certificates.
+ basicConstraints = CA:FALSE
+ nsCertType = client, email
+ subjectKeyIdentifier = hash
+ authorityKeyIdentifier = keyid,issuer
+ keyUsage = critical, nonRepudiation, digitalSignature, keyEncipherment
+ extendedKeyUsage = clientAuth, emailProtection
+ ...
+
+* Generate the certificate chain:
+
+NOTE: The user MUST set a UNIQUE "Common Name" on each certificate
+
+1) Generate the Root Pair:
+
+Generate a root key:
+
+ openssl genrsa -aes256 -out TestRoot.key 2048
+
+Generate a self-signed root certificate:
+
+ openssl req -extensions v3_ca -new -x509 -days 3650 -key TestRoot.key -out TestRoot.crt
+ openssl x509 -in TestRoot.crt -out TestRoot.cer -outform DER
+ openssl x509 -inform DER -in TestRoot.cer -outform PEM -out TestRoot.pub.pem
+
+2) Generate the Intermediate Pair:
+
+Generate the intermediate key:
+
+ openssl genrsa -aes256 -out TestSub.key 2048
+
+Generate the intermediate certificate:
+
+ openssl req -new -days 3650 -key TestSub.key -out TestSub.csr
+ openssl ca -extensions v3_intermediate_ca -in TestSub.csr -days 3650 -out TestSub.crt -cert TestRoot.crt -keyfile TestRoot.key
+ openssl x509 -in TestSub.crt -out TestSub.cer -outform DER
+ openssl x509 -inform DER -in TestSub.cer -outform PEM -out TestSub.pub.pem
+
+3) Generate User Key Pair for Data Signing:
+
+Generate User key:
+
+ openssl genrsa -aes256 -out TestCert.key 2048
+
+Generate User certificate:
+
+ openssl req -new -days 3650 -key TestCert.key -out TestCert.csr
+ openssl ca -extensions usr_cert -in TestCert.csr -days 3650 -out TestCert.crt -cert TestSub.crt -keyfile TestSub.key
+ openssl x509 -in TestCert.crt -out TestCert.cer -outform DER
+ openssl x509 -inform DER -in TestCert.cer -outform PEM -out TestCert.pub.pem
+
+Convert the key and certificate for signing. The password is removed with the -nodes flag for convenience in this sample.
+
+ openssl pkcs12 -export -out TestCert.pfx -inkey TestCert.key -in TestCert.crt
+ openssl pkcs12 -in TestCert.pfx -nodes -out TestCert.pem
+
+* Verify Data Signing & Verification with new X.509 Certificate Chain
+
+1) Sign a Binary File to generate a detached PKCS7 signature:
+
+ openssl smime -sign -binary -signer TestCert.pem -outform DER -md sha256 -certfile TestSub.pub.pem -out test.bin.p7 -in test.bin
+
+2) Verify PKCS7 Signature of a Binary File:
+
+ openssl smime -verify -inform DER -in test.bin.p7 -content test.bin -CAfile TestRoot.pub.pem -out test.org.bin
+
+## Generate DSC PCD include files for Certificate
+
+The `BinToPcd` utility can be used to convert the binary Certificate file to a
+text file that can be included from a DSC file to set a PCD to the contents of
+the Certificate file.
+
+The following 2 PCDs can be set to the PKCS7 Certificate value. The first one
+supports a single certificate. The second one supports multiple certificate
+values using the XDR format.
+* `gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer`
+* `gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr`
+
+Generate DSC PCD include files:
+```
+BinToPcd.py -i TestRoot.cer -p gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer -o TestRoot.cer.gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer.inc
+BinToPcd.py -i TestRoot.cer -p gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr -x -o TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc
+```
+
+These files can be used in `!include` statements in DSC file PCD sections. For example:
+
+* Platform scoped fixed at build PCD section
+```
+[PcdsFixedAtBuild]
+ !include BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer.inc
+```
+
+* Platform scoped patchable in module PCD section
+```
+[PcdsPatchableInModule]
+ !include BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc
+```
+
+* Module scoped fixed at build PCD section
+```
+[Components]
+ FmpDevicePkg/FmpDxe/FmpDxe.inf {
+ <PcdsFixedAtBuild>
+ !include BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc
+ }
+```
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pem
new file mode 100644
index 00000000..8950dde1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pem
@@ -0,0 +1,60 @@
+Bag Attributes
+ localKeyID: 32 25 22 FA 81 B3 BF 25 E2 F7 8F 0B 1B C4 50 70 BB B7 85 96
+subject=/C=CN/ST=SH/O=TianoCore/OU=EDKII/CN=TestCert/emailAddress=edkii@tianocore.org
+issuer=/C=CN/ST=SH/O=TianoCore/OU=EDKII/CN=TestSub/emailAddress=edkii@tianocore.org
+-----BEGIN CERTIFICATE-----
+MIIEKzCCAxOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwdDELMAkGA1UEBhMCQ04x
+CzAJBgNVBAgMAlNIMRIwEAYDVQQKDAlUaWFub0NvcmUxDjAMBgNVBAsMBUVES0lJ
+MRAwDgYDVQQDDAdUZXN0U3ViMSIwIAYJKoZIhvcNAQkBFhNlZGtpaUB0aWFub2Nv
+cmUub3JnMB4XDTE3MDQxMDA4MzgwNFoXDTE4MDQxMDA4MzgwNFowdTELMAkGA1UE
+BhMCQ04xCzAJBgNVBAgMAlNIMRIwEAYDVQQKDAlUaWFub0NvcmUxDjAMBgNVBAsM
+BUVES0lJMREwDwYDVQQDDAhUZXN0Q2VydDEiMCAGCSqGSIb3DQEJARYTZWRraWlA
+dGlhbm9jb3JlLm9yZzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPe+
+2NX/Tf0iWMJgYMXMoWOiveX9FGx9YcwH+BKn9ZPZHig6CsZ6B17fwBWek8rIOAOR
+W8FL+UyRhsnKF/oKjMN7awiLjackjq8m0bPFHVl4dJooulHmSPCsRMeG/pWs4DVP
+WiIoF1uvXN6MZ3zt0hofgqPnGjJQF0HLECrPqyBv7sit9fIaNZ/clqcR3ZqdXQRU
+fEk7dE8pg+ZjNNa/5WTGwSBB7Ieku4jGbKybvpj6FtEP/8YyAJC3fOD+Y4PIQCnF
+xzWchOGrFcoeqgf/hLhzoiRvalgnvjczbo3W4sgFwFD/WxoDqb1l1moHyOubw5oT
+CdD+J+QwdFl1kCkG+K8CAwEAAaOBxTCBwjAJBgNVHRMEAjAAMBEGCWCGSAGG+EIB
+AQQEAwIFoDAzBglghkgBhvhCAQ0EJhYkT3BlblNTTCBHZW5lcmF0ZWQgQ2xpZW50
+IENlcnRpZmljYXRlMB0GA1UdDgQWBBTACEuCjiL/cFrP+l8hECWctq+Q+TAfBgNV
+HSMEGDAWgBTWnWbWSXz6II1ddWkqQQp6A1ql6zAOBgNVHQ8BAf8EBAMCBeAwHQYD
+VR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMEMA0GCSqGSIb3DQEBCwUAA4IBAQA7
+vYHdpk9u63dpMKAt5MrKU9dxVn/wuqNaYQMucvQLpcE12fgWMhV2wOHlmk3fJxq7
+CnD8QVaRbL3OQYWQQDA+sGNSJ9r71WFFET++94Rny6BzTz+dkrvIS4WaL/vLZ17c
+/gOsMCZUlhodxDcSSkachab3eE/VTEzOMUm41YYeW7USIoNSSgkWSnwZQVgcIg93
+F9X6lIr0Ik6rxHMq2ManiuSh6cMjJMGYGf2/58TySIefrXTe2A3TKQR27OYjfXJO
+l/H7u+4HS9AVCA7b9NihR5iSho5HrWqNC4Mmuz8D8iFOI2nWcek86StDswtoqDtu
+yekXblzF5lQY0goqDiks
+-----END CERTIFICATE-----
+Bag Attributes
+ localKeyID: 32 25 22 FA 81 B3 BF 25 E2 F7 8F 0B 1B C4 50 70 BB B7 85 96
+Key Attributes: <No Attributes>
+-----BEGIN PRIVATE KEY-----
+MIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQD3vtjV/039IljC
+YGDFzKFjor3l/RRsfWHMB/gSp/WT2R4oOgrGegde38AVnpPKyDgDkVvBS/lMkYbJ
+yhf6CozDe2sIi42nJI6vJtGzxR1ZeHSaKLpR5kjwrETHhv6VrOA1T1oiKBdbr1ze
+jGd87dIaH4Kj5xoyUBdByxAqz6sgb+7IrfXyGjWf3JanEd2anV0EVHxJO3RPKYPm
+YzTWv+VkxsEgQeyHpLuIxmysm76Y+hbRD//GMgCQt3zg/mODyEApxcc1nIThqxXK
+HqoH/4S4c6Ikb2pYJ743M26N1uLIBcBQ/1saA6m9ZdZqB8jrm8OaEwnQ/ifkMHRZ
+dZApBvivAgMBAAECggEBAJ8NtLJ27T/1vBxWuepjfL217sroFyOrv4y5FQgNMvnP
+q6/Ry7cvAupjJjP7EhFfR67qtIi92PjSeUG18HzEJykdZFMhHTlQnBZRCtKqWzRk
+xB9wxGXuPafeQW4D+hBn4632GvzQ1mYziKEMbShkmr3QuxO1PDlO+A9yahfCKbBx
+SPCo+McV+N4c8ft/0UPMxqJLcZSMWscrBMCw1OhGdHry4CEr+NWHBeAAUWXrGSlq
+BPwM6PT00fku1RwQrw0QZw0YKL8VH5iA/uD8hfuaO2YUlt2Z025csNRyIPrizr6v
+Q8Is7jetqPpXulWSBtSYoghTj97DeYQQsQwck+tQN6kCgYEA/beFmdojyc9CoLkd
+0MgwyPBdWma77rj80PAgeRm0hl2KQa8pA6dL/1y5x3vA25gqBr++q+KmSkYT6z/Z
+n3llOk6pRlSWFlxuSLHVjOb/Qp1V/uxEG68Tg8L/I3SlMWiQ+/MnsXNHh+WEtKcZ
+FCVd0ASA4NbsKYKflT2QgraDB00CgYEA+fmRrwRlkh2OxVrxpGFER2uosYGlwQiq
+Xb75eU8BnpO8CCnXtBK4Uv3J6l/zfc+Tr2LzzgPkQiWd4NF1/EFxCNQA5kxGcPf5
+F4f8dPr8CrADO1JNrX2ITHsosaaC1ImdW/r6tl66Ie2ueCImk5Yfu5DQv7JrKh/d
+lrTEUxJL2esCgYEA2VKBla9MSGjH4XOvHk7busJotC6be3fo1e9ZYWGrSAyHiIvI
+zeBXMHz0hPJz16UXGoDTideyKJyuIyul9Pu+wZrvU9bQWIcD0DDDgtW6gAzUxG8M
+R8pHJO26LVyUwyWWSrmUnmLoOndWnIck7CS1nqC849o0n7nLh8IcLlq3EWECgYEA
+1HkeLE4na2f2R6fChv8qAy7uJ1rUodwUuzQtZsAR11EpXSL7tpLG27veGXpPQ9vh
+Yw1PwAesx9Cjfklr6OtTAbb5wMaKhVExB6BNpL0E6KytQon1foaaCLASadXnlHIY
+L+uHmOWxfk9BodkdQwsyk8JGvPoRfq+xMH0b9qQxltsCgYEAtNf8yvoTXUHa2zje
+PvI6OiQjuiON5UIt9KkQNrIrcm4wiQ2eVdkCQcUstuXtmBtvnsrxlay0jbSz2bV6
+1sWlJIvfZJujC901yMs5+twr6jMuXZ6ashWF1f2UbwgtKvh49PPgly4RhWST3Kp1
+J1AmCrzTwtaNmTZd1g5IYreXpKw=
+-----END PRIVATE KEY-----
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pub.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pub.pem
new file mode 100644
index 00000000..70524b59
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestCert.pub.pem
@@ -0,0 +1,25 @@
+-----BEGIN CERTIFICATE-----
+MIIEKzCCAxOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwdDELMAkGA1UEBhMCQ04x
+CzAJBgNVBAgMAlNIMRIwEAYDVQQKDAlUaWFub0NvcmUxDjAMBgNVBAsMBUVES0lJ
+MRAwDgYDVQQDDAdUZXN0U3ViMSIwIAYJKoZIhvcNAQkBFhNlZGtpaUB0aWFub2Nv
+cmUub3JnMB4XDTE3MDQxMDA4MzgwNFoXDTE4MDQxMDA4MzgwNFowdTELMAkGA1UE
+BhMCQ04xCzAJBgNVBAgMAlNIMRIwEAYDVQQKDAlUaWFub0NvcmUxDjAMBgNVBAsM
+BUVES0lJMREwDwYDVQQDDAhUZXN0Q2VydDEiMCAGCSqGSIb3DQEJARYTZWRraWlA
+dGlhbm9jb3JlLm9yZzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPe+
+2NX/Tf0iWMJgYMXMoWOiveX9FGx9YcwH+BKn9ZPZHig6CsZ6B17fwBWek8rIOAOR
+W8FL+UyRhsnKF/oKjMN7awiLjackjq8m0bPFHVl4dJooulHmSPCsRMeG/pWs4DVP
+WiIoF1uvXN6MZ3zt0hofgqPnGjJQF0HLECrPqyBv7sit9fIaNZ/clqcR3ZqdXQRU
+fEk7dE8pg+ZjNNa/5WTGwSBB7Ieku4jGbKybvpj6FtEP/8YyAJC3fOD+Y4PIQCnF
+xzWchOGrFcoeqgf/hLhzoiRvalgnvjczbo3W4sgFwFD/WxoDqb1l1moHyOubw5oT
+CdD+J+QwdFl1kCkG+K8CAwEAAaOBxTCBwjAJBgNVHRMEAjAAMBEGCWCGSAGG+EIB
+AQQEAwIFoDAzBglghkgBhvhCAQ0EJhYkT3BlblNTTCBHZW5lcmF0ZWQgQ2xpZW50
+IENlcnRpZmljYXRlMB0GA1UdDgQWBBTACEuCjiL/cFrP+l8hECWctq+Q+TAfBgNV
+HSMEGDAWgBTWnWbWSXz6II1ddWkqQQp6A1ql6zAOBgNVHQ8BAf8EBAMCBeAwHQYD
+VR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMEMA0GCSqGSIb3DQEBCwUAA4IBAQA7
+vYHdpk9u63dpMKAt5MrKU9dxVn/wuqNaYQMucvQLpcE12fgWMhV2wOHlmk3fJxq7
+CnD8QVaRbL3OQYWQQDA+sGNSJ9r71WFFET++94Rny6BzTz+dkrvIS4WaL/vLZ17c
+/gOsMCZUlhodxDcSSkachab3eE/VTEzOMUm41YYeW7USIoNSSgkWSnwZQVgcIg93
+F9X6lIr0Ik6rxHMq2ManiuSh6cMjJMGYGf2/58TySIefrXTe2A3TKQR27OYjfXJO
+l/H7u+4HS9AVCA7b9NihR5iSho5HrWqNC4Mmuz8D8iFOI2nWcek86StDswtoqDtu
+yekXblzF5lQY0goqDiks
+-----END CERTIFICATE-----
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer
new file mode 100644
index 00000000..4c9bf0be
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer
Binary files differ
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer.inc b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer.inc
new file mode 100644
index 00000000..907c70dd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer.inc
@@ -0,0 +1 @@
+ gEfiSecurityPkgTokenSpaceGuid.PcdPkcs7CertBuffer|{0x30, 0x82, 0x03, 0xEC, 0x30, 0x82, 0x02, 0xD4, 0xA0, 0x03, 0x02, 0x01, 0x02, 0x02, 0x09, 0x00, 0xC0, 0x91, 0xC5, 0xE2, 0xB7, 0x66, 0xC0, 0xF8, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x0B, 0x05, 0x00, 0x30, 0x81, 0x82, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x43, 0x4E, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x07, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x12, 0x30, 0x10, 0x06, 0x03, 0x55, 0x04, 0x0A, 0x0C, 0x09, 0x54, 0x69, 0x61, 0x6E, 0x6F, 0x43, 0x6F, 0x72, 0x65, 0x31, 0x0E, 0x30, 0x0C, 0x06, 0x03, 0x55, 0x04, 0x0B, 0x0C, 0x05, 0x45, 0x44, 0x4B, 0x49, 0x49, 0x31, 0x11, 0x30, 0x0F, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0C, 0x08, 0x54, 0x65, 0x73, 0x74, 0x52, 0x6F, 0x6F, 0x74, 0x31, 0x22, 0x30, 0x20, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x09, 0x01, 0x16, 0x13, 0x65, 0x64, 0x6B, 0x69, 0x69, 0x40, 0x74, 0x69, 0x61, 0x6E, 0x6F, 0x63, 0x6F, 0x72, 0x65, 0x2E, 0x6F, 0x72, 0x67, 0x30, 0x1E, 0x17, 0x0D, 0x31, 0x37, 0x30, 0x34, 0x31, 0x30, 0x30, 0x38, 0x32, 0x37, 0x34, 0x30, 0x5A, 0x17, 0x0D, 0x31, 0x37, 0x30, 0x35, 0x31, 0x30, 0x30, 0x38, 0x32, 0x37, 0x34, 0x30, 0x5A, 0x30, 0x81, 0x82, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x43, 0x4E, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x07, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x12, 0x30, 0x10, 0x06, 0x03, 0x55, 0x04, 0x0A, 0x0C, 0x09, 0x54, 0x69, 0x61, 0x6E, 0x6F, 0x43, 0x6F, 0x72, 0x65, 0x31, 0x0E, 0x30, 0x0C, 0x06, 0x03, 0x55, 0x04, 0x0B, 0x0C, 0x05, 0x45, 0x44, 0x4B, 0x49, 0x49, 0x31, 0x11, 0x30, 0x0F, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0C, 0x08, 0x54, 0x65, 0x73, 0x74, 0x52, 0x6F, 0x6F, 0x74, 0x31, 0x22, 0x30, 0x20, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x09, 0x01, 0x16, 0x13, 0x65, 0x64, 0x6B, 0x69, 0x69, 0x40, 0x74, 0x69, 0x61, 0x6E, 0x6F, 0x63, 0x6F, 0x72, 0x65, 0x2E, 0x6F, 0x72, 0x67, 0x30, 0x82, 0x01, 0x22, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0F, 0x00, 0x30, 0x82, 0x01, 0x0A, 0x02, 0x82, 0x01, 0x01, 0x00, 0xB9, 0x29, 0x29, 0x6C, 0x60, 0x0C, 0xD7, 0x23, 0xF6, 0x7D, 0xEE, 0xF0, 0x62, 0xFF, 0xD9, 0xC9, 0xAA, 0x55, 0x8C, 0x81, 0x95, 0x56, 0x3F, 0xB7, 0x56, 0x53, 0xB0, 0xC2, 0x82, 0x12, 0xC5, 0x3B, 0x75, 0x23, 0xB9, 0x4D, 0xD6, 0xC4, 0x55, 0x73, 0xF3, 0xAA, 0x95, 0xA8, 0x1B, 0xF3, 0x93, 0x7E, 0x9E, 0x40, 0xE4, 0x1D, 0x22, 0x9C, 0x93, 0x07, 0x0B, 0xD7, 0xAA, 0x5B, 0xD7, 0xE4, 0x1A, 0x21, 0x84, 0xD7, 0x63, 0x59, 0x03, 0x50, 0x1F, 0xF5, 0x14, 0x55, 0x93, 0x91, 0x9B, 0xF5, 0x52, 0xB0, 0xBF, 0x0E, 0x5C, 0x68, 0x3B, 0x59, 0x52, 0x98, 0x96, 0x56, 0xE1, 0xAB, 0xC4, 0x43, 0xBB, 0x05, 0x57, 0x78, 0x45, 0x01, 0x9F, 0x58, 0x15, 0x53, 0x0E, 0x11, 0x94, 0x2F, 0x0E, 0xF1, 0xA6, 0x19, 0xA2, 0x6E, 0x86, 0x39, 0x2B, 0x33, 0x8D, 0xC7, 0xC5, 0xEB, 0xEE, 0x1E, 0x33, 0xD3, 0x32, 0x94, 0xC1, 0x59, 0xC4, 0x0C, 0x97, 0x0B, 0x12, 0x48, 0x5F, 0x33, 0xF6, 0x60, 0x74, 0x7D, 0x57, 0xC2, 0x13, 0x2D, 0x7D, 0xA9, 0x87, 0xA3, 0x35, 0xEA, 0x91, 0x83, 0x3F, 0x67, 0x7A, 0x92, 0x1F, 0x01, 0x53, 0x9F, 0x62, 0x5F, 0x99, 0x12, 0xFD, 0x73, 0x1B, 0x2D, 0x9E, 0x2B, 0x6C, 0x34, 0x49, 0xAF, 0x4F, 0x07, 0x8F, 0xC0, 0xE9, 0x6B, 0x9E, 0x5F, 0x79, 0x35, 0xDA, 0x2A, 0x5C, 0x88, 0xEE, 0xF6, 0x48, 0x61, 0xDA, 0x96, 0xE3, 0x48, 0x46, 0xA0, 0x94, 0x1C, 0x9D, 0xF6, 0x5C, 0x87, 0x0E, 0xEF, 0x74, 0x09, 0x91, 0x0D, 0x3D, 
0x5A, 0xE7, 0xC5, 0x4C, 0x8A, 0x7A, 0xAC, 0xA1, 0x85, 0xB6, 0x67, 0x44, 0x17, 0x55, 0x52, 0x3A, 0xE8, 0x11, 0x4D, 0x58, 0xA2, 0x93, 0x00, 0x62, 0xEA, 0x7B, 0x80, 0xED, 0xCF, 0xBD, 0xDF, 0x75, 0x80, 0x4B, 0xB9, 0x65, 0x63, 0xAD, 0x0B, 0x4D, 0x74, 0xFA, 0x59, 0x02, 0x03, 0x01, 0x00, 0x01, 0xA3, 0x63, 0x30, 0x61, 0x30, 0x1D, 0x06, 0x03, 0x55, 0x1D, 0x0E, 0x04, 0x16, 0x04, 0x14, 0x16, 0xAA, 0xD6, 0x8E, 0x1B, 0x2D, 0x43, 0xF3, 0x2D, 0xB0, 0x24, 0xAD, 0x36, 0x65, 0x3F, 0xB2, 0xFA, 0xB1, 0x2C, 0xED, 0x30, 0x1F, 0x06, 0x03, 0x55, 0x1D, 0x23, 0x04, 0x18, 0x30, 0x16, 0x80, 0x14, 0x16, 0xAA, 0xD6, 0x8E, 0x1B, 0x2D, 0x43, 0xF3, 0x2D, 0xB0, 0x24, 0xAD, 0x36, 0x65, 0x3F, 0xB2, 0xFA, 0xB1, 0x2C, 0xED, 0x30, 0x0F, 0x06, 0x03, 0x55, 0x1D, 0x13, 0x01, 0x01, 0xFF, 0x04, 0x05, 0x30, 0x03, 0x01, 0x01, 0xFF, 0x30, 0x0E, 0x06, 0x03, 0x55, 0x1D, 0x0F, 0x01, 0x01, 0xFF, 0x04, 0x04, 0x03, 0x02, 0x01, 0x86, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x0B, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, 0x95, 0xDE, 0xDF, 0xA4, 0x14, 0xDB, 0x92, 0x22, 0x78, 0x1A, 0xBD, 0x31, 0x9D, 0x1E, 0xD7, 0x2F, 0x0A, 0x10, 0x11, 0x5D, 0x74, 0x61, 0xE8, 0x30, 0xC4, 0xF3, 0x15, 0xE9, 0x30, 0x54, 0xF4, 0xBB, 0x0C, 0x04, 0x78, 0x13, 0x5D, 0x2C, 0xDD, 0x8C, 0x92, 0x90, 0xD1, 0x9C, 0xD0, 0xD0, 0x18, 0xA3, 0xA3, 0xFC, 0x8C, 0x28, 0x5A, 0xD4, 0x91, 0x4D, 0x08, 0xC3, 0xF6, 0x1A, 0xC8, 0xDD, 0xA6, 0x08, 0x58, 0xE2, 0x15, 0x95, 0xFB, 0x2D, 0x2D, 0x8A, 0xB1, 0x30, 0x80, 0xBD, 0x9A, 0xB6, 0xE1, 0x2C, 0x20, 0x3E, 0xDD, 0xC4, 0xC7, 0x55, 0x65, 0xCF, 0x28, 0x17, 0xF4, 0xEE, 0xDA, 0xBE, 0x77, 0x70, 0xD5, 0x52, 0xD6, 0x15, 0x7A, 0xFB, 0xAD, 0xAF, 0xFD, 0xD5, 0x45, 0x90, 0x5A, 0xE6, 0x31, 0x42, 0xD7, 0x84, 0xB3, 0x49, 0x56, 0x6A, 0xD3, 0x47, 0xF3, 0xBF, 0x68, 0x60, 0x8B, 0x0F, 0xE2, 0xAF, 0xF4, 0xE3, 0xEC, 0x12, 0xB9, 0xE2, 0x3A, 0x16, 0x11, 0x4E, 0x4D, 0x73, 0x79, 0xAF, 0x47, 0x85, 0x4C, 0x76, 0x26, 0x9E, 0x8B, 0x32, 0xC0, 0x8E, 0xC2, 0xDC, 0x27, 0xA6, 0xEF, 0xAC, 0x93, 0x9E, 0xA1, 0x5E, 0xCF, 0x34, 0x45, 0xE0, 0x2A, 0xC7, 0x9D, 0x4D, 0xD7, 0xD7, 0x37, 0x72, 0x97, 0xF8, 0x58, 0xF9, 0xB6, 0x35, 0x48, 0xF1, 0xD1, 0x0A, 0x72, 0x7F, 0xFD, 0x4D, 0x7C, 0xE9, 0xCC, 0xD8, 0x48, 0x1B, 0x49, 0x52, 0x53, 0xDE, 0x51, 0x01, 0x53, 0x35, 0xBC, 0x90, 0xCD, 0x8C, 0x8A, 0xCC, 0x43, 0x20, 0xA7, 0x45, 0xFF, 0x2B, 0x55, 0xB0, 0x8B, 0x2D, 0xFF, 0x55, 0x15, 0x4B, 0x84, 0xD0, 0xC3, 0xD3, 0x90, 0x9C, 0x94, 0x4B, 0x55, 0xD5, 0x62, 0xEA, 0x22, 0xAB, 0x62, 0x68, 0xDD, 0x53, 0xC6, 0xDC, 0xA5, 0xDD, 0x9A, 0x2D, 0x8E, 0x79, 0x7C, 0x2E, 0x9C, 0xE4, 0x66, 0x80, 0x8C, 0x1D} \ No newline at end of file
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc
new file mode 100644
index 00000000..69f7341d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.cer.gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr.inc
@@ -0,0 +1 @@
+ gFmpDevicePkgTokenSpaceGuid.PcdFmpDevicePkcs7CertBufferXdr|{0x00, 0x00, 0x03, 0xF0, 0x30, 0x82, 0x03, 0xEC, 0x30, 0x82, 0x02, 0xD4, 0xA0, 0x03, 0x02, 0x01, 0x02, 0x02, 0x09, 0x00, 0xC0, 0x91, 0xC5, 0xE2, 0xB7, 0x66, 0xC0, 0xF8, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x0B, 0x05, 0x00, 0x30, 0x81, 0x82, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x43, 0x4E, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x07, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x12, 0x30, 0x10, 0x06, 0x03, 0x55, 0x04, 0x0A, 0x0C, 0x09, 0x54, 0x69, 0x61, 0x6E, 0x6F, 0x43, 0x6F, 0x72, 0x65, 0x31, 0x0E, 0x30, 0x0C, 0x06, 0x03, 0x55, 0x04, 0x0B, 0x0C, 0x05, 0x45, 0x44, 0x4B, 0x49, 0x49, 0x31, 0x11, 0x30, 0x0F, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0C, 0x08, 0x54, 0x65, 0x73, 0x74, 0x52, 0x6F, 0x6F, 0x74, 0x31, 0x22, 0x30, 0x20, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x09, 0x01, 0x16, 0x13, 0x65, 0x64, 0x6B, 0x69, 0x69, 0x40, 0x74, 0x69, 0x61, 0x6E, 0x6F, 0x63, 0x6F, 0x72, 0x65, 0x2E, 0x6F, 0x72, 0x67, 0x30, 0x1E, 0x17, 0x0D, 0x31, 0x37, 0x30, 0x34, 0x31, 0x30, 0x30, 0x38, 0x32, 0x37, 0x34, 0x30, 0x5A, 0x17, 0x0D, 0x31, 0x37, 0x30, 0x35, 0x31, 0x30, 0x30, 0x38, 0x32, 0x37, 0x34, 0x30, 0x5A, 0x30, 0x81, 0x82, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x43, 0x4E, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x0B, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x07, 0x0C, 0x02, 0x53, 0x48, 0x31, 0x12, 0x30, 0x10, 0x06, 0x03, 0x55, 0x04, 0x0A, 0x0C, 0x09, 0x54, 0x69, 0x61, 0x6E, 0x6F, 0x43, 0x6F, 0x72, 0x65, 0x31, 0x0E, 0x30, 0x0C, 0x06, 0x03, 0x55, 0x04, 0x0B, 0x0C, 0x05, 0x45, 0x44, 0x4B, 0x49, 0x49, 0x31, 0x11, 0x30, 0x0F, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0C, 0x08, 0x54, 0x65, 0x73, 0x74, 0x52, 0x6F, 0x6F, 0x74, 0x31, 0x22, 0x30, 0x20, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x09, 0x01, 0x16, 0x13, 0x65, 0x64, 0x6B, 0x69, 0x69, 0x40, 0x74, 0x69, 0x61, 0x6E, 0x6F, 0x63, 0x6F, 0x72, 0x65, 0x2E, 0x6F, 0x72, 0x67, 0x30, 0x82, 0x01, 0x22, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0F, 0x00, 0x30, 0x82, 0x01, 0x0A, 0x02, 0x82, 0x01, 0x01, 0x00, 0xB9, 0x29, 0x29, 0x6C, 0x60, 0x0C, 0xD7, 0x23, 0xF6, 0x7D, 0xEE, 0xF0, 0x62, 0xFF, 0xD9, 0xC9, 0xAA, 0x55, 0x8C, 0x81, 0x95, 0x56, 0x3F, 0xB7, 0x56, 0x53, 0xB0, 0xC2, 0x82, 0x12, 0xC5, 0x3B, 0x75, 0x23, 0xB9, 0x4D, 0xD6, 0xC4, 0x55, 0x73, 0xF3, 0xAA, 0x95, 0xA8, 0x1B, 0xF3, 0x93, 0x7E, 0x9E, 0x40, 0xE4, 0x1D, 0x22, 0x9C, 0x93, 0x07, 0x0B, 0xD7, 0xAA, 0x5B, 0xD7, 0xE4, 0x1A, 0x21, 0x84, 0xD7, 0x63, 0x59, 0x03, 0x50, 0x1F, 0xF5, 0x14, 0x55, 0x93, 0x91, 0x9B, 0xF5, 0x52, 0xB0, 0xBF, 0x0E, 0x5C, 0x68, 0x3B, 0x59, 0x52, 0x98, 0x96, 0x56, 0xE1, 0xAB, 0xC4, 0x43, 0xBB, 0x05, 0x57, 0x78, 0x45, 0x01, 0x9F, 0x58, 0x15, 0x53, 0x0E, 0x11, 0x94, 0x2F, 0x0E, 0xF1, 0xA6, 0x19, 0xA2, 0x6E, 0x86, 0x39, 0x2B, 0x33, 0x8D, 0xC7, 0xC5, 0xEB, 0xEE, 0x1E, 0x33, 0xD3, 0x32, 0x94, 0xC1, 0x59, 0xC4, 0x0C, 0x97, 0x0B, 0x12, 0x48, 0x5F, 0x33, 0xF6, 0x60, 0x74, 0x7D, 0x57, 0xC2, 0x13, 0x2D, 0x7D, 0xA9, 0x87, 0xA3, 0x35, 0xEA, 0x91, 0x83, 0x3F, 0x67, 0x7A, 0x92, 0x1F, 0x01, 0x53, 0x9F, 0x62, 0x5F, 0x99, 0x12, 0xFD, 0x73, 0x1B, 0x2D, 0x9E, 0x2B, 0x6C, 0x34, 0x49, 0xAF, 0x4F, 0x07, 0x8F, 0xC0, 0xE9, 0x6B, 0x9E, 0x5F, 0x79, 0x35, 0xDA, 0x2A, 0x5C, 0x88, 0xEE, 0xF6, 0x48, 0x61, 0xDA, 0x96, 0xE3, 0x48, 0x46, 0xA0, 0x94, 0x1C, 0x9D, 0xF6, 0x5C, 0x87, 0x0E, 0xEF, 
0x74, 0x09, 0x91, 0x0D, 0x3D, 0x5A, 0xE7, 0xC5, 0x4C, 0x8A, 0x7A, 0xAC, 0xA1, 0x85, 0xB6, 0x67, 0x44, 0x17, 0x55, 0x52, 0x3A, 0xE8, 0x11, 0x4D, 0x58, 0xA2, 0x93, 0x00, 0x62, 0xEA, 0x7B, 0x80, 0xED, 0xCF, 0xBD, 0xDF, 0x75, 0x80, 0x4B, 0xB9, 0x65, 0x63, 0xAD, 0x0B, 0x4D, 0x74, 0xFA, 0x59, 0x02, 0x03, 0x01, 0x00, 0x01, 0xA3, 0x63, 0x30, 0x61, 0x30, 0x1D, 0x06, 0x03, 0x55, 0x1D, 0x0E, 0x04, 0x16, 0x04, 0x14, 0x16, 0xAA, 0xD6, 0x8E, 0x1B, 0x2D, 0x43, 0xF3, 0x2D, 0xB0, 0x24, 0xAD, 0x36, 0x65, 0x3F, 0xB2, 0xFA, 0xB1, 0x2C, 0xED, 0x30, 0x1F, 0x06, 0x03, 0x55, 0x1D, 0x23, 0x04, 0x18, 0x30, 0x16, 0x80, 0x14, 0x16, 0xAA, 0xD6, 0x8E, 0x1B, 0x2D, 0x43, 0xF3, 0x2D, 0xB0, 0x24, 0xAD, 0x36, 0x65, 0x3F, 0xB2, 0xFA, 0xB1, 0x2C, 0xED, 0x30, 0x0F, 0x06, 0x03, 0x55, 0x1D, 0x13, 0x01, 0x01, 0xFF, 0x04, 0x05, 0x30, 0x03, 0x01, 0x01, 0xFF, 0x30, 0x0E, 0x06, 0x03, 0x55, 0x1D, 0x0F, 0x01, 0x01, 0xFF, 0x04, 0x04, 0x03, 0x02, 0x01, 0x86, 0x30, 0x0D, 0x06, 0x09, 0x2A, 0x86, 0x48, 0x86, 0xF7, 0x0D, 0x01, 0x01, 0x0B, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, 0x95, 0xDE, 0xDF, 0xA4, 0x14, 0xDB, 0x92, 0x22, 0x78, 0x1A, 0xBD, 0x31, 0x9D, 0x1E, 0xD7, 0x2F, 0x0A, 0x10, 0x11, 0x5D, 0x74, 0x61, 0xE8, 0x30, 0xC4, 0xF3, 0x15, 0xE9, 0x30, 0x54, 0xF4, 0xBB, 0x0C, 0x04, 0x78, 0x13, 0x5D, 0x2C, 0xDD, 0x8C, 0x92, 0x90, 0xD1, 0x9C, 0xD0, 0xD0, 0x18, 0xA3, 0xA3, 0xFC, 0x8C, 0x28, 0x5A, 0xD4, 0x91, 0x4D, 0x08, 0xC3, 0xF6, 0x1A, 0xC8, 0xDD, 0xA6, 0x08, 0x58, 0xE2, 0x15, 0x95, 0xFB, 0x2D, 0x2D, 0x8A, 0xB1, 0x30, 0x80, 0xBD, 0x9A, 0xB6, 0xE1, 0x2C, 0x20, 0x3E, 0xDD, 0xC4, 0xC7, 0x55, 0x65, 0xCF, 0x28, 0x17, 0xF4, 0xEE, 0xDA, 0xBE, 0x77, 0x70, 0xD5, 0x52, 0xD6, 0x15, 0x7A, 0xFB, 0xAD, 0xAF, 0xFD, 0xD5, 0x45, 0x90, 0x5A, 0xE6, 0x31, 0x42, 0xD7, 0x84, 0xB3, 0x49, 0x56, 0x6A, 0xD3, 0x47, 0xF3, 0xBF, 0x68, 0x60, 0x8B, 0x0F, 0xE2, 0xAF, 0xF4, 0xE3, 0xEC, 0x12, 0xB9, 0xE2, 0x3A, 0x16, 0x11, 0x4E, 0x4D, 0x73, 0x79, 0xAF, 0x47, 0x85, 0x4C, 0x76, 0x26, 0x9E, 0x8B, 0x32, 0xC0, 0x8E, 0xC2, 0xDC, 0x27, 0xA6, 0xEF, 0xAC, 0x93, 0x9E, 0xA1, 0x5E, 0xCF, 0x34, 0x45, 0xE0, 0x2A, 0xC7, 0x9D, 0x4D, 0xD7, 0xD7, 0x37, 0x72, 0x97, 0xF8, 0x58, 0xF9, 0xB6, 0x35, 0x48, 0xF1, 0xD1, 0x0A, 0x72, 0x7F, 0xFD, 0x4D, 0x7C, 0xE9, 0xCC, 0xD8, 0x48, 0x1B, 0x49, 0x52, 0x53, 0xDE, 0x51, 0x01, 0x53, 0x35, 0xBC, 0x90, 0xCD, 0x8C, 0x8A, 0xCC, 0x43, 0x20, 0xA7, 0x45, 0xFF, 0x2B, 0x55, 0xB0, 0x8B, 0x2D, 0xFF, 0x55, 0x15, 0x4B, 0x84, 0xD0, 0xC3, 0xD3, 0x90, 0x9C, 0x94, 0x4B, 0x55, 0xD5, 0x62, 0xEA, 0x22, 0xAB, 0x62, 0x68, 0xDD, 0x53, 0xC6, 0xDC, 0xA5, 0xDD, 0x9A, 0x2D, 0x8E, 0x79, 0x7C, 0x2E, 0x9C, 0xE4, 0x66, 0x80, 0x8C, 0x1D} \ No newline at end of file
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pem
new file mode 100644
index 00000000..79aeae49
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pem
@@ -0,0 +1,58 @@
+Bag Attributes
+ localKeyID: F4 2E C8 1D 29 A0 02 47 B7 93 2B 69 8D 8D D1 33 7A E3 09 30
+subject=/C=CN/ST=SH/L=SH/O=TianoCore/OU=EDKII/CN=TestRoot/emailAddress=edkii@tianocore.org
+issuer=/C=CN/ST=SH/L=SH/O=TianoCore/OU=EDKII/CN=TestRoot/emailAddress=edkii@tianocore.org
+-----BEGIN CERTIFICATE-----
+MIID7DCCAtSgAwIBAgIJAMCRxeK3ZsD4MA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJDTjELMAkGA1UECAwCU0gxCzAJBgNVBAcMAlNIMRIwEAYDVQQKDAlUaWFu
+b0NvcmUxDjAMBgNVBAsMBUVES0lJMREwDwYDVQQDDAhUZXN0Um9vdDEiMCAGCSqG
+SIb3DQEJARYTZWRraWlAdGlhbm9jb3JlLm9yZzAeFw0xNzA0MTAwODI3NDBaFw0x
+NzA1MTAwODI3NDBaMIGCMQswCQYDVQQGEwJDTjELMAkGA1UECAwCU0gxCzAJBgNV
+BAcMAlNIMRIwEAYDVQQKDAlUaWFub0NvcmUxDjAMBgNVBAsMBUVES0lJMREwDwYD
+VQQDDAhUZXN0Um9vdDEiMCAGCSqGSIb3DQEJARYTZWRraWlAdGlhbm9jb3JlLm9y
+ZzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALkpKWxgDNcj9n3u8GL/
+2cmqVYyBlVY/t1ZTsMKCEsU7dSO5TdbEVXPzqpWoG/OTfp5A5B0inJMHC9eqW9fk
+GiGE12NZA1Af9RRVk5Gb9VKwvw5caDtZUpiWVuGrxEO7BVd4RQGfWBVTDhGULw7x
+phmiboY5KzONx8Xr7h4z0zKUwVnEDJcLEkhfM/ZgdH1XwhMtfamHozXqkYM/Z3qS
+HwFTn2JfmRL9cxstnitsNEmvTwePwOlrnl95NdoqXIju9khh2pbjSEaglByd9lyH
+Du90CZENPVrnxUyKeqyhhbZnRBdVUjroEU1YopMAYup7gO3Pvd91gEu5ZWOtC010
++lkCAwEAAaNjMGEwHQYDVR0OBBYEFBaq1o4bLUPzLbAkrTZlP7L6sSztMB8GA1Ud
+IwQYMBaAFBaq1o4bLUPzLbAkrTZlP7L6sSztMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IBAQCV3t+kFNuSIngavTGdHtcv
+ChARXXRh6DDE8xXpMFT0uwwEeBNdLN2MkpDRnNDQGKOj/IwoWtSRTQjD9hrI3aYI
+WOIVlfstLYqxMIC9mrbhLCA+3cTHVWXPKBf07tq+d3DVUtYVevutr/3VRZBa5jFC
+14SzSVZq00fzv2hgiw/ir/Tj7BK54joWEU5Nc3mvR4VMdiaeizLAjsLcJ6bvrJOe
+oV7PNEXgKsedTdfXN3KX+Fj5tjVI8dEKcn/9TXzpzNhIG0lSU95RAVM1vJDNjIrM
+QyCnRf8rVbCLLf9VFUuE0MPTkJyUS1XVYuoiq2Jo3VPG3KXdmi2OeXwunORmgIwd
+-----END CERTIFICATE-----
+Bag Attributes
+ localKeyID: F4 2E C8 1D 29 A0 02 47 B7 93 2B 69 8D 8D D1 33 7A E3 09 30
+Key Attributes: <No Attributes>
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC5KSlsYAzXI/Z9
+7vBi/9nJqlWMgZVWP7dWU7DCghLFO3UjuU3WxFVz86qVqBvzk36eQOQdIpyTBwvX
+qlvX5BohhNdjWQNQH/UUVZORm/VSsL8OXGg7WVKYllbhq8RDuwVXeEUBn1gVUw4R
+lC8O8aYZom6GOSszjcfF6+4eM9MylMFZxAyXCxJIXzP2YHR9V8ITLX2ph6M16pGD
+P2d6kh8BU59iX5kS/XMbLZ4rbDRJr08Hj8Dpa55feTXaKlyI7vZIYdqW40hGoJQc
+nfZchw7vdAmRDT1a58VMinqsoYW2Z0QXVVI66BFNWKKTAGLqe4Dtz73fdYBLuWVj
+rQtNdPpZAgMBAAECggEAci5d6wT4Jht5P4N/Ha2kweWWR8UJMFyuVD/bura3mITn
+4ZW92HjOMWjLgupeAkCsTi65/PWBFHG97cqSRHnXW2At6ofTsS9j1JxJGfvQtqNj
+zhlR9XdJperfvN5Nc277BkuWUj/O86d5/4Ef29lMknZGLeNHLs15qiWpe1p+HKvt
++DfL7Prl5qWA5G90QmXgRQJbThl1TYLCYkETB+9m3MIRm8Z01XKH+fm4ahgclEkG
+XaQl04DhMEo7A/sC8NUnozOMEf81Ixkt3wEpoEDtZ+WhRTrgLF23Q4sXAIBMlEfz
+Pz2UaX/9KBT1dRbZseStIjJKMc8qd+pC7Ww2tuHEOQKBgQDmLdFSgHc2URQW/Otj
+fr9S/Z7EPSOA/tmh4dFhQGwzKF4Us838deRz2cRTbgq5BHuBCrMEPRBiX8h4WLEB
+NVZ73JjgOfyshcDXWNg5noc9f24HYHMZnjcFmHNokpyIgxLl2qgN8f03doJEmKkj
+pm/VnfZmkGDd65IXRp8MYMTQOwKBgQDN7ofqKWK5SA+vt+tDOkCYq6eHKb9+ImPh
+PreikT5xc9SMtb0tGlIjKydsiqA9Jv1WRnpUG0fVfMyagBSOrKt9wC143VEvOtkR
+QJehmLLYG97HP7CXtniAWeKuc2pfCd+nGdHLFmduuTEEDcxab5LQc5dvYQ/RfznF
+YVunt73qewKBgQCg11VUpCYpU2CJa7SEMtY4hLbDg8FiazLiVqx7m4u/964+IyKG
+Dk9T0NDKR7PAc2xl0HclOBJR24J27erJ4F6NcKl2za5NU61cDV4SbT8tbvUQvInR
+Veg2xb+nTAOLtKOo8DDMhdMeRXZjvpU6LxwolhfOtYaqq+jK0PNkr933bwKBgA0G
+RiBgR7cyQJO7jSyuVYGSccERuePPZwPLBLBKgWmJiurvX6ynmoRQ6WhrCCF2AtXf
+FUOWih+Nih9HdIVllF8atYWMceML1MjLjguRbdZPRPLTK2cdClgL11NzR0oFhNi7
+wFIY86fEHL6F5OPfZKi8dtp7iBWW919tfe+IpoFbAoGBAMzNKKBHG5eMuKQI/Dww
+50PDHu25TGUiTc1bHx18v7mGlcvhEPkDYAKd3y7FN5VRoooarGYlLDHXez0FvDTa
+ABFUUad70bULTqRTSmld0I9CWWnYs0vaFKgIemddQ7W2eXr7N+N+ED+OK/PvWjMq
+LMKhChf252RfOYdB+WN6alVG
+-----END PRIVATE KEY-----
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pub.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pub.pem
new file mode 100644
index 00000000..728d3362
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestRoot.pub.pem
@@ -0,0 +1,23 @@
+-----BEGIN CERTIFICATE-----
+MIID7DCCAtSgAwIBAgIJAMCRxeK3ZsD4MA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJDTjELMAkGA1UECAwCU0gxCzAJBgNVBAcMAlNIMRIwEAYDVQQKDAlUaWFu
+b0NvcmUxDjAMBgNVBAsMBUVES0lJMREwDwYDVQQDDAhUZXN0Um9vdDEiMCAGCSqG
+SIb3DQEJARYTZWRraWlAdGlhbm9jb3JlLm9yZzAeFw0xNzA0MTAwODI3NDBaFw0x
+NzA1MTAwODI3NDBaMIGCMQswCQYDVQQGEwJDTjELMAkGA1UECAwCU0gxCzAJBgNV
+BAcMAlNIMRIwEAYDVQQKDAlUaWFub0NvcmUxDjAMBgNVBAsMBUVES0lJMREwDwYD
+VQQDDAhUZXN0Um9vdDEiMCAGCSqGSIb3DQEJARYTZWRraWlAdGlhbm9jb3JlLm9y
+ZzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALkpKWxgDNcj9n3u8GL/
+2cmqVYyBlVY/t1ZTsMKCEsU7dSO5TdbEVXPzqpWoG/OTfp5A5B0inJMHC9eqW9fk
+GiGE12NZA1Af9RRVk5Gb9VKwvw5caDtZUpiWVuGrxEO7BVd4RQGfWBVTDhGULw7x
+phmiboY5KzONx8Xr7h4z0zKUwVnEDJcLEkhfM/ZgdH1XwhMtfamHozXqkYM/Z3qS
+HwFTn2JfmRL9cxstnitsNEmvTwePwOlrnl95NdoqXIju9khh2pbjSEaglByd9lyH
+Du90CZENPVrnxUyKeqyhhbZnRBdVUjroEU1YopMAYup7gO3Pvd91gEu5ZWOtC010
++lkCAwEAAaNjMGEwHQYDVR0OBBYEFBaq1o4bLUPzLbAkrTZlP7L6sSztMB8GA1Ud
+IwQYMBaAFBaq1o4bLUPzLbAkrTZlP7L6sSztMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4IBAQCV3t+kFNuSIngavTGdHtcv
+ChARXXRh6DDE8xXpMFT0uwwEeBNdLN2MkpDRnNDQGKOj/IwoWtSRTQjD9hrI3aYI
+WOIVlfstLYqxMIC9mrbhLCA+3cTHVWXPKBf07tq+d3DVUtYVevutr/3VRZBa5jFC
+14SzSVZq00fzv2hgiw/ir/Tj7BK54joWEU5Nc3mvR4VMdiaeizLAjsLcJ6bvrJOe
+oV7PNEXgKsedTdfXN3KX+Fj5tjVI8dEKcn/9TXzpzNhIG0lSU95RAVM1vJDNjIrM
+QyCnRf8rVbCLLf9VFUuE0MPTkJyUS1XVYuoiq2Jo3VPG3KXdmi2OeXwunORmgIwd
+-----END CERTIFICATE-----
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pem
new file mode 100644
index 00000000..2f7d3549
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pem
@@ -0,0 +1,59 @@
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 4098 (0x1002)
+ Signature Algorithm: sha256WithRSAEncryption
+ Issuer: C = CN, ST = SH, L = SH, O = TianoCore, OU = EDKII, CN = TestRoot, emailAddress = edkii@tianocore.org
+ Validity
+ Not Before: Apr 10 08:33:45 2017 GMT
+ Not After : Apr 10 08:33:45 2018 GMT
+ Subject: C = CN, ST = SH, O = TianoCore, OU = EDKII, CN = TestSub, emailAddress = edkii@tianocore.org
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:c5:3a:af:16:34:9a:14:61:74:8c:39:1a:04:1f:
+ 7b:95:d3:40:b7:ea:26:a7:7b:8d:76:d3:86:1b:7c:
+ 07:17:d2:56:72:36:13:b4:6c:75:b7:bf:d1:35:d1:
+ 31:d5:9a:07:c1:62:4e:aa:3d:bd:d8:40:8b:48:9a:
+ c5:46:c4:c3:10:2c:d4:82:d9:6d:f4:c3:de:85:fa:
+ 34:1d:d1:74:7a:5f:16:34:59:2b:2b:03:61:46:62:
+ d7:88:62:59:4d:d8:55:00:52:54:e1:15:5e:a9:ec:
+ d6:e8:51:fd:ef:8e:68:5f:d2:40:d2:61:ef:2c:1d:
+ 5b:a7:6e:14:4c:12:bc:60:81:8e:66:c9:84:51:c2:
+ 89:51:fc:e5:7f:86:9a:78:a4:c1:f7:0f:a9:a5:97:
+ 60:dd:6f:c8:a0:fd:ea:07:2f:01:36:0a:e8:bd:0e:
+ dc:48:2e:85:22:7b:bb:db:68:78:eb:cd:6a:54:07:
+ f7:81:a5:52:8f:f3:5c:09:1e:76:a3:d1:91:8f:ee:
+ 86:2c:85:49:99:96:4f:5f:5b:0d:08:ae:d8:20:e8:
+ e3:67:70:c6:ec:0e:0e:bd:bf:3c:f6:db:e4:45:d5:
+ 7a:bb:9f:d1:3b:18:89:fc:63:ac:c2:30:b8:fa:bb:
+ 8a:24:63:4e:79:58:78:72:ab:27:36:3d:bb:4f:47:
+ d6:ef
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Subject Key Identifier:
+ D6:9D:66:D6:49:7C:FA:20:8D:5D:75:69:2A:41:0A:7A:03:5A:A5:EB
+ X509v3 Authority Key Identifier:
+ keyid:16:AA:D6:8E:1B:2D:43:F3:2D:B0:24:AD:36:65:3F:B2:FA:B1:2C:ED
+
+ X509v3 Basic Constraints: critical
+ CA:TRUE
+ X509v3 Key Usage: critical
+ Digital Signature, Certificate Sign, CRL Sign
+ Signature Algorithm: sha256WithRSAEncryption
+ 83:3c:ae:b2:fc:99:3d:33:b3:da:ca:26:83:8c:a9:ae:f8:bb:
+ ad:05:37:97:a5:f8:0d:2b:4e:3e:e5:b7:12:68:f8:64:d4:bd:
+ ff:65:7d:57:98:61:cd:47:10:a5:6a:bd:66:89:74:ce:5e:28:
+ 29:39:67:c9:1f:54:ec:78:76:b1:dd:04:91:63:b6:8c:2f:86:
+ 59:1f:c4:2b:a1:4a:8c:a8:5b:f6:8a:92:f0:83:bb:92:92:5c:
+ b1:1c:18:95:3d:d6:be:6d:79:9d:4f:7b:92:1f:68:f5:1f:cd:
+ f4:37:2d:1e:e3:f6:eb:f2:8a:a4:8d:a1:c5:db:0c:3a:59:01:
+ dc:be:a9:c1:0b:04:ba:e8:02:a9:85:cd:d7:48:0d:f6:60:30:
+ 2b:05:ba:e0:c7:d8:9f:23:14:37:04:0a:a7:bc:b6:c8:25:31:
+ e4:9a:41:a5:83:c2:ee:89:d3:fa:a5:7c:ae:a6:14:22:a4:5f:
+ 73:03:f2:7b:3c:51:f7:76:2a:0a:cf:ee:71:35:1c:bc:ff:3f:
+ 9b:d5:b1:33:e0:b6:fc:2a:c8:ab:84:89:cd:fa:1c:ee:12:8c:
+ 07:ba:93:46:50:b3:3f:73:05:be:67:58:60:90:05:2c:d3:b6:
+ 19:7c:a4:f0:6e:ee:d4:f2:0e:f5:02:79:5f:2c:28:83:1e:83:
+ c6:92:ba:7c
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pub.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pub.pem
new file mode 100644
index 00000000..91cb90a4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Pkcs7Sign/TestSub.pub.pem
@@ -0,0 +1,23 @@
+-----BEGIN CERTIFICATE-----
+MIID1jCCAr6gAwIBAgICEAIwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNVBAYTAkNO
+MQswCQYDVQQIDAJTSDELMAkGA1UEBwwCU0gxEjAQBgNVBAoMCVRpYW5vQ29yZTEO
+MAwGA1UECwwFRURLSUkxETAPBgNVBAMMCFRlc3RSb290MSIwIAYJKoZIhvcNAQkB
+FhNlZGtpaUB0aWFub2NvcmUub3JnMB4XDTE3MDQxMDA4MzM0NVoXDTE4MDQxMDA4
+MzM0NVowdDELMAkGA1UEBhMCQ04xCzAJBgNVBAgMAlNIMRIwEAYDVQQKDAlUaWFu
+b0NvcmUxDjAMBgNVBAsMBUVES0lJMRAwDgYDVQQDDAdUZXN0U3ViMSIwIAYJKoZI
+hvcNAQkBFhNlZGtpaUB0aWFub2NvcmUub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEAxTqvFjSaFGF0jDkaBB97ldNAt+omp3uNdtOGG3wHF9JWcjYT
+tGx1t7/RNdEx1ZoHwWJOqj292ECLSJrFRsTDECzUgtlt9MPehfo0HdF0el8WNFkr
+KwNhRmLXiGJZTdhVAFJU4RVeqezW6FH9745oX9JA0mHvLB1bp24UTBK8YIGOZsmE
+UcKJUfzlf4aaeKTB9w+ppZdg3W/IoP3qBy8BNgrovQ7cSC6FInu722h4681qVAf3
+gaVSj/NcCR52o9GRj+6GLIVJmZZPX1sNCK7YIOjjZ3DG7A4Ovb889tvkRdV6u5/R
+OxiJ/GOswjC4+ruKJGNOeVh4cqsnNj27T0fW7wIDAQABo2MwYTAdBgNVHQ4EFgQU
+1p1m1kl8+iCNXXVpKkEKegNapeswHwYDVR0jBBgwFoAUFqrWjhstQ/MtsCStNmU/
+svqxLO0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcN
+AQELBQADggEBAIM8rrL8mT0zs9rKJoOMqa74u60FN5el+A0rTj7ltxJo+GTUvf9l
+fVeYYc1HEKVqvWaJdM5eKCk5Z8kfVOx4drHdBJFjtowvhlkfxCuhSoyoW/aKkvCD
+u5KSXLEcGJU91r5teZ1Pe5IfaPUfzfQ3LR7j9uvyiqSNocXbDDpZAdy+qcELBLro
+AqmFzddIDfZgMCsFuuDH2J8jFDcECqe8tsglMeSaQaWDwu6J0/qlfK6mFCKkX3MD
+8ns8Ufd2KgrP7nE1HLz/P5vVsTPgtvwqyKuEic36HO4SjAe6k0ZQsz9zBb5nWGCQ
+BSzTthl8pPBu7tTyDvUCeV8sKIMeg8aSunw=
+-----END CERTIFICATE-----
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/README.md b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/README.md
new file mode 100644
index 00000000..7266b8f8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/README.md
@@ -0,0 +1,29 @@
+# Edk2 Basetools
+
+This folder has traditionally held the source of the Python-based tools used by EDK2.
+The official repo for this source has moved to https://github.com/tianocore/edk2-basetools.
+This folder will remain in the tree until the next stable release (expected 202102).
+There is a new folder under Basetools, `BinPipWrappers`, that uses the pip module rather than this tree for Basetools.
+By adding the scope `pipbuild-win` or `pipbuild-unix` (depending on your host system), the SDE will use the
+`BinPipWrappers` instead of the regular `BinWrappers`.
+
+## Why Move It?
+
+The discussion is on the mailing list. The RFC is here: https://edk2.groups.io/g/rfc/topic/74009714#270
+The benefits are that the Basetools project can be used separately from EDK2 itself and that it is
+offered in a globally accessible manner.
+This makes it much easier to build a module using Basetools.
+Separating the Basetools into their own repo also allows for an easier CI and contribution process.
+Additional pros, cons, and process can be found on the mailing list.
+
+## How Do I Install It?
+
+By default, EDK2 is tied to and tested with a specific version of the Basetools through `pip-requirements.txt`.
+You can simply run:
+
+```bash
+pip install -r pip-requirements.txt
+```
+
+This will install the required module, though we strongly suggest setting up a virtual environment.
+You can also install a local clone of the Basetools or pin a specific git commit, as shown below.
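+
+For instance, a local development clone or a pinned git commit can be installed like this (a sketch;
+the clone path and commit hash are placeholders):
+
+```bash
+# Install an editable local clone of https://github.com/tianocore/edk2-basetools
+pip install -e /path/to/edk2-basetools
+
+# Or install straight from git at an exact commit
+pip install git+https://github.com/tianocore/edk2-basetools.git@<commit-hash>
+```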
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py
new file mode 100755
index 00000000..8292b057
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py
@@ -0,0 +1,170 @@
+## @file
+# This tool can be used to generate new RSA 2048 bit private/public key pairs
+# in a PEM file format using OpenSSL command line utilities that are installed
+# on the path specified by the system environment variable OPENSSL_PATH.
+# This tool can also optionally write one or more SHA 256 hashes of 2048 bit
+# public keys to a binary file, write one or more SHA 256 hashes of 2048 bit
+# public keys to a file in a C structure format, and in verbose mode display
+# one or more SHA 256 hashes of 2048 bit public keys in a C structure format
+# on STDOUT.
+# This tool has been tested with OpenSSL 1.0.1e 11 Feb 2013
+#
+# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
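+# Example usage (illustrative; the filenames are placeholders):
+#   Rsa2048Sha256GenerateKeys.py -o NewKey.pem --public-key-hash Key.bin
+#   Rsa2048Sha256GenerateKeys.py -i Key1.pem Key2.pem --public-key-hash-c Keys.c -v
+#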
+
+'''
+Rsa2048Sha256GenerateKeys
+'''
+from __future__ import print_function
+
+import os
+import sys
+import argparse
+import subprocess
+from Common.BuildVersion import gBUILD_VERSION
+
+#
+# Globals for help information
+#
+__prog__ = 'Rsa2048Sha256GenerateKeys'
+__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
+__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
+__usage__ = '%s [options]' % (__prog__)
+
+
+if __name__ == '__main__':
+ #
+ # Create command line argument parser object
+ #
+ parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument("--version", action='version', version=__version__)
+ group.add_argument("-o", "--output", dest='OutputFile', type=argparse.FileType('wb'), metavar='filename', nargs='*', help="specify the output private key filename in PEM format")
+ group.add_argument("-i", "--input", dest='InputFile', type=argparse.FileType('rb'), metavar='filename', nargs='*', help="specify the input private key filename in PEM format")
+ parser.add_argument("--public-key-hash", dest='PublicKeyHashFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in binary format")
+ parser.add_argument("--public-key-hash-c", dest='PublicKeyHashCFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in C structure format")
+ parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
+ parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
+ parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
+
+ #
+ # Parse command line arguments
+ #
+ args = parser.parse_args()
+
+ #
+ # Generate file path to Open SSL command
+ #
+ OpenSslCommand = 'openssl'
+ try:
+ OpenSslPath = os.environ['OPENSSL_PATH']
+ OpenSslCommand = os.path.join(OpenSslPath, OpenSslCommand)
+ if ' ' in OpenSslCommand:
+ OpenSslCommand = '"' + OpenSslCommand + '"'
+ except:
+ pass
+
+ #
+ # Verify that Open SSL command is available
+ #
+ try:
+ Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ except:
+ print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
+ sys.exit(1)
+
+ Version = Process.communicate()
+ if Process.returncode != 0:
+ print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
+ sys.exit(Process.returncode)
+ print(Version[0].decode())
+
+ args.PemFileName = []
+
+ #
+ # Check for output file argument
+ #
+ if args.OutputFile is not None:
+ for Item in args.OutputFile:
+ #
+ # Save PEM filename and close output file
+ #
+ args.PemFileName.append(Item.name)
+ Item.close()
+
+ #
+ # Generate private key and save it to output file in a PEM file format
+ #
+ Process = subprocess.Popen('%s genrsa -out %s 2048' % (OpenSslCommand, Item.name), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ Process.communicate()
+ if Process.returncode != 0:
+ print('ERROR: RSA 2048 key generation failed')
+ sys.exit(Process.returncode)
+
+ #
+ # Check for input file argument
+ #
+ if args.InputFile is not None:
+ for Item in args.InputFile:
+ #
+ # Save PEM filename and close input file
+ #
+ args.PemFileName.append(Item.name)
+ Item.close()
+
+ PublicKeyHash = bytearray()
+ for Item in args.PemFileName:
+ #
+ # Extract public key from private key into STDOUT
+ #
+ Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ # Keep the modulus as bytes so the hex pairs below can extend a bytearray
+ PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
+ if Process.returncode != 0:
+ print('ERROR: Unable to extract public key from private key')
+ sys.exit(Process.returncode)
+ PublicKey = bytearray()
+ for Index in range (0, len(PublicKeyHexString), 2):
+ PublicKey = PublicKey + PublicKeyHexString[Index:Index + 2]
+
+ #
+ # Generate SHA 256 hash of RSA 2048 bit public key into STDOUT
+ #
+ Process = subprocess.Popen('%s dgst -sha256 -binary' % (OpenSslCommand), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ Process.stdin.write (PublicKey)
+ PublicKeyHash = PublicKeyHash + Process.communicate()[0]
+ if Process.returncode != 0:
+ print('ERROR: Unable to extract SHA 256 hash of public key')
+ sys.exit(Process.returncode)
+
+ #
+ # Write SHA 256 hash of 2048 bit binary public key to public key hash file
+ #
+ try:
+ args.PublicKeyHashFile.write (PublicKeyHash)
+ args.PublicKeyHashFile.close ()
+ except:
+ pass
+
+ #
+ # Convert public key hash to a C structure string
+ #
+ PublicKeyHashC = '{'
+ for Item in PublicKeyHash:
+ PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)
+ PublicKeyHashC = PublicKeyHashC[:-2] + '}'
+
+ #
+ # Write SHA 256 of 2048 bit binary public key to public key hash C structure file
+ #
+ try:
+ args.PublicKeyHashCFile.write (bytes(PublicKeyHashC, 'utf-8'))
+ args.PublicKeyHashCFile.close ()
+ except:
+ pass
+
+ #
+ # If verbose is enabled display the public key in C structure format
+ #
+ if args.Verbose:
+ print('PublicKeySha256 = ' + PublicKeyHashC)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
new file mode 100755
index 00000000..d894a2ec
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py
@@ -0,0 +1,235 @@
+## @file
+# This tool encodes and decodes GUIDed FFS sections or FMP capsule for a GUID type of
+# EFI_CERT_TYPE_RSA2048_SHA256_GUID defined in the UEFI 2.4 Specification as
+# {0xa7717414, 0xc616, 0x4977, {0x94, 0x20, 0x84, 0x47, 0x12, 0xa7, 0x35, 0xbf}}
+# This tool has been tested with OpenSSL 1.0.1e 11 Feb 2013
+#
+# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
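+# Example usage (illustrative; the filenames are placeholders):
+#   Rsa2048Sha256Sign.py -e -o Signed.bin --private-key MyKey.pem Input.bin
+#   Rsa2048Sha256Sign.py -d -o Signature.bin Signed.bin
+#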
+
+'''
+Rsa2048Sha256Sign
+'''
+from __future__ import print_function
+
+import os
+import sys
+import argparse
+import subprocess
+import uuid
+import struct
+import collections
+from Common.BuildVersion import gBUILD_VERSION
+
+#
+# Globals for help information
+#
+__prog__ = 'Rsa2048Sha256Sign'
+__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
+__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
+__usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
+
+#
+# GUID for SHA 256 Hash Algorithm from UEFI Specification
+#
+EFI_HASH_ALGORITHM_SHA256_GUID = uuid.UUID('{51aa59de-fdf2-4ea3-bc63-875fb7842ee9}')
+
+#
+# Structure definition to unpack EFI_CERT_BLOCK_RSA_2048_SHA256 from UEFI 2.4 Specification
+#
+# typedef struct _EFI_CERT_BLOCK_RSA_2048_SHA256 {
+# EFI_GUID HashType;
+# UINT8 PublicKey[256];
+# UINT8 Signature[256];
+# } EFI_CERT_BLOCK_RSA_2048_SHA256;
+#
+EFI_CERT_BLOCK_RSA_2048_SHA256 = collections.namedtuple('EFI_CERT_BLOCK_RSA_2048_SHA256', ['HashType', 'PublicKey', 'Signature'])
+EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT = struct.Struct('16s256s256s')
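+
+# Example (illustrative): a signed image produced by this tool is this header followed
+# by the original payload, so a decode splits it as follows (Buffer is a placeholder
+# for the signed image bytes):
+#   Header  = EFI_CERT_BLOCK_RSA_2048_SHA256._make(EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.unpack_from(Buffer))
+#   Payload = Buffer[EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.size:]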
+
+#
+# Filename of test signing private key that is stored in same directory as this tool
+#
+TEST_SIGNING_PRIVATE_KEY_FILENAME = 'TestSigningPrivateKey.pem'
+
+if __name__ == '__main__':
+ #
+ # Create command line argument parser object
+ #
+ parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
+ group.add_argument("-d", action="store_true", dest='Decode', help='decode file')
+ group.add_argument("--version", action='version', version=__version__)
+ parser.add_argument("-o", "--output", dest='OutputFile', type=str, metavar='filename', help="specify the output filename", required=True)
+ parser.add_argument("--monotonic-count", dest='MonotonicCountStr', type=str, help="specify the MonotonicCount in FMP capsule.")
+ parser.add_argument("--private-key", dest='PrivateKeyFile', type=argparse.FileType('rb'), help="specify the private key filename. If not specified, a test signing key is used.")
+ parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
+ parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
+ parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
+ parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename")
+
+ #
+ # Parse command line arguments
+ #
+ args = parser.parse_args()
+
+ #
+ # Generate file path to Open SSL command
+ #
+ OpenSslCommand = 'openssl'
+ try:
+ OpenSslPath = os.environ['OPENSSL_PATH']
+ OpenSslCommand = os.path.join(OpenSslPath, OpenSslCommand)
+ if ' ' in OpenSslCommand:
+ OpenSslCommand = '"' + OpenSslCommand + '"'
+ except:
+ pass
+
+ #
+ # Verify that Open SSL command is available
+ #
+ try:
+ Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ except:
+ print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
+ sys.exit(1)
+
+ Version = Process.communicate()
+ if Process.returncode != 0:
+ print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
+ sys.exit(Process.returncode)
+ print(Version[0].decode('utf-8'))
+
+ #
+ # Read input file into a buffer and save input filename
+ #
+ args.InputFileName = args.InputFile.name
+ args.InputFileBuffer = args.InputFile.read()
+ args.InputFile.close()
+
+ #
+ # Save output filename and check if path exists
+ #
+ OutputDir = os.path.dirname(args.OutputFile)
+ if OutputDir and not os.path.exists(OutputDir):
+ print('ERROR: The output path does not exist: %s' % OutputDir)
+ sys.exit(1)
+ args.OutputFileName = args.OutputFile
+
+ #
+ # Save private key filename and close private key file
+ #
+ try:
+ args.PrivateKeyFileName = args.PrivateKeyFile.name
+ args.PrivateKeyFile.close()
+ except:
+ try:
+ #
+ # Get path to currently executing script or executable
+ #
+ if hasattr(sys, 'frozen'):
+ RsaToolPath = sys.executable
+ else:
+ RsaToolPath = sys.argv[0]
+ if RsaToolPath.startswith('"'):
+ RsaToolPath = RsaToolPath[1:]
+ if RsaToolPath.endswith('"'):
+ RsaToolPath = RsaToolPath[:-1]
+ args.PrivateKeyFileName = os.path.join(os.path.dirname(os.path.realpath(RsaToolPath)), TEST_SIGNING_PRIVATE_KEY_FILENAME)
+ args.PrivateKeyFile = open(args.PrivateKeyFileName, 'rb')
+ args.PrivateKeyFile.close()
+ except:
+ print('ERROR: test signing private key file %s missing' % (args.PrivateKeyFileName))
+ sys.exit(1)
+
+ #
+ # Extract public key from private key into STDOUT
+ #
+ Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
+ PublicKeyHexString = PublicKeyHexString.decode('utf-8')
+ PublicKey = ''
+ while len(PublicKeyHexString) > 0:
+ PublicKey = PublicKey + PublicKeyHexString[0:2]
+ PublicKeyHexString = PublicKeyHexString[2:]
+ if Process.returncode != 0:
+ sys.exit(Process.returncode)
+
+ if args.MonotonicCountStr:
+ try:
+ if args.MonotonicCountStr.upper().startswith('0X'):
+ args.MonotonicCountValue = int(args.MonotonicCountStr, 16)
+ else:
+ args.MonotonicCountValue = int(args.MonotonicCountStr)
+ except:
+ pass
+
+ if args.Encode:
+ FullInputFileBuffer = args.InputFileBuffer
+ if args.MonotonicCountStr:
+ format = "%dsQ" % len(args.InputFileBuffer)
+ FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
+ #
+ # Sign the input file using the specified private key and capture signature from STDOUT
+ #
+ Process = subprocess.Popen('%s dgst -sha256 -sign "%s"' % (OpenSslCommand, args.PrivateKeyFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ Signature = Process.communicate(input=FullInputFileBuffer)[0]
+ if Process.returncode != 0:
+ sys.exit(Process.returncode)
+
+ #
+ # Write output file that contains hash GUID, Public Key, Signature, and Input data
+ #
+ args.OutputFile = open(args.OutputFileName, 'wb')
+ args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.bytes_le)
+ args.OutputFile.write(bytearray.fromhex(str(PublicKey)))
+ args.OutputFile.write(Signature)
+ args.OutputFile.write(args.InputFileBuffer)
+ args.OutputFile.close()
+
+ if args.Decode:
+ #
+ # Parse Hash Type, Public Key, and Signature from the section header
+ #
+ Header = EFI_CERT_BLOCK_RSA_2048_SHA256._make(EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.unpack_from(args.InputFileBuffer))
+ args.InputFileBuffer = args.InputFileBuffer[EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.size:]
+
+ #
+ # Verify that the Hash Type matches the expected SHA256 type
+ #
+ if uuid.UUID(bytes_le = Header.HashType) != EFI_HASH_ALGORITHM_SHA256_GUID:
+ print('ERROR: unsupported hash GUID')
+ sys.exit(1)
+
+ #
+ # Verify the public key
+ #
+ if Header.PublicKey != bytearray.fromhex(PublicKey):
+ print('ERROR: Public key in input file does not match public key from private key file')
+ sys.exit(1)
+
+ FullInputFileBuffer = args.InputFileBuffer
+ if args.MonotonicCountStr:
+ format = "%dsQ" % len(args.InputFileBuffer)
+ FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
+
+ #
+ # Write Signature to output file
+ #
+ open(args.OutputFileName, 'wb').write(Header.Signature)
+
+ #
+ # Verify signature
+ #
+ Process = subprocess.Popen('%s dgst -sha256 -prverify "%s" -signature %s' % (OpenSslCommand, args.PrivateKeyFileName, args.OutputFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ Process.communicate(input=FullInputFileBuffer)
+ if Process.returncode != 0:
+ print('ERROR: Verification failed')
+ os.remove (args.OutputFileName)
+ sys.exit(Process.returncode)
+
+ #
+ # Save output file contents from input file
+ #
+ open(args.OutputFileName, 'wb').write(args.InputFileBuffer)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPrivateKey.pem b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPrivateKey.pem
new file mode 100644
index 00000000..ced48887
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPrivateKey.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAudErrthutWCNdQy+XWCiKg89B9BPb9GaOtEBL0tIOGCkmAY3
+/aT13OCxR1f4FIGavKT+7aDZDnwyK3g1U/J2q4DdPHzSRoOOK5h9dqRabqEtlgaE
+2OQeLvs20hKARzQEHSj9aaUKPfN/uF7DYENs1N38QqSo//R1FZ4jJ1th5z+4RoIE
+/M3+yaSxGvOi+0FUEW57stqLGMTY8WFZeUOmCuRMGKC+5o57/H2OJa+Nh9rcO+8e
+Nc7/mR5ZB3lbkzq/3Wf9wZcQkCZaIA2uKSEM81I/3SGvu8P9+jfPDJ027dQOdjop
+M+0CA+Id5+XPoUFOy1aWKnM6Keq8BWsqA2f/awIDAQABAoIBAQCElZVfupqz3GLS
+w/cI1ayf6iOX8bDcjmhH013PCx5K69pFhxpoZ03u/nw/9arTpu5r8CbeE0kNJtz9
+W1Zhwb2e/eHeCQpYugG1ZacQApFsIqFOTPR5dbt7tlAVcJILPDg7PkYjHVnDfn0B
+ZsxtYW7ELGiUfL0BdMIeXKmkqXWf9AU37Frky3i1GwmyNy15RWj9T/JS8Wg6G3vq
+ZCqk7nOjMap8vFw0w6Z0FodapEiYxvfLsq/CQxZm5B2IeYPiIXuTif69Fw0Yhm5x
++GBOrrOZjFJuhFWngJg8lkh77P++CfmkX+V9WUHBPYyU6R9WPnpyC9T2sfyrTBG0
+yVc1mGTxAoGBAOtc3EaV7tVqzAQupTI4lyD1+eOfYIzPGbPVYkEQg81ru+O4SuNw
+n8P31L0O1F+YnlhHsDW2B2CsEAi1dFgfj5uUni4Ug42tNZLQKYHy6x436HYj2BRC
+fvju4gUDj2rJaKMo4bb1shpyIZce5APg0kO4Pfp8B29+qaNjJJSm/xdfAoGBAMoc
+KzKIQN+uHnXdRMCTnKStiyxJ5RRlhkeDEgrhjirL3d5GCsNxd1AJUYBEkgs7K6aX
+4HbuJxIypTNQJwqMe4ltGQazOxG4JKv7JB0bchXMBfzGUA4tTbyQ45APFvPhtWG9
+3xwIyJy602dWQzF7nrNkQo81HHgd8RBlQDjY5E91AoGBAJW12KvRfT0llKZoYU4j
+rwYZUJ2YaFrI9WltpvUKIzyMddRxCrnHjuihgSHNCAvS0Y4H868GnYTBjeVsPpg7
+Yyasjz1ycQeykKxVWsG9Gg97spNLU4v3jDqeAQj9bcGYYcW/IGw3ttWZmYI99P3+
+iODolgv5zuT95bv3OXKRExvzAoGAQeJ04oAwRbqJC3saEn0ML1Kzb7iXCNMftugv
+VYS5x5G1veOvVo8CigC/pMS1pxue2yvfbGWuLux7lgKm93VcQCxkYB7FPr18luPD
+ngl4Y0qjGQe8ySJTmQjSZf/FR7NoYXh35remThFJIQKp3NOjpG8mk3CI209HakTX
+brClnJECgYA+Qw69eMTrIIBH2Kw0mazgG9lCoCRA5GF8/qwgH0yxyP0yy/gtxXMs
+nMqwo7X3yIIf5Aracc5MbXfaJgR5fOpCrtu3NJGdHC63JblB//7hXiklGNKDaCiV
+EBkLJ3YHYn8fAr8n+agE0PSfXFVnC1ToPRWUbBcGrgE16bpzn8o9gw==
+-----END RSA PRIVATE KEY-----
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.bin b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.bin
new file mode 100644
index 00000000..8c15a000
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.bin
@@ -0,0 +1 @@
+‘)ĽêmÚ³ªoPüÛK~<ÖܤzÝæŒs–¢Ô¦M \ No newline at end of file
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt
new file mode 100644
index 00000000..f538f8b4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Rsa2048Sha256Sign/TestSigningPublicKey.txt
@@ -0,0 +1 @@
+{0x91, 0x29, 0xc4, 0xbd, 0xea, 0x6d, 0xda, 0xb3, 0xaa, 0x6f, 0x50, 0x16, 0xfc, 0xdb, 0x4b, 0x7e, 0x3c, 0xd6, 0xdc, 0xa4, 0x7a, 0x0e, 0xdd, 0xe6, 0x15, 0x8c, 0x73, 0x96, 0xa2, 0xd4, 0xa6, 0x4d}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/Split.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/Split.py
new file mode 100755
index 00000000..e0d91e4e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/Split.py
@@ -0,0 +1,210 @@
+# @file
+# Split a file into two pieces at the request offset.
+#
+# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+# Import Modules
+#
+import argparse
+import os
+import io
+import shutil
+import logging
+import sys
+import tempfile
+
+parser = argparse.ArgumentParser(description='''
+SplitFile creates two binary files, either in the same directory as the input file or in the specified directory.
+''')
+parser.add_argument("-f", "--filename", dest="inputfile",
+ required=True, help="The input file to split tool.")
+parser.add_argument("-s", "--split", dest="position",
+ required=True, help="The number of bytes in the first file. The valid format are HEX, Decimal and Decimal[KMG].")
+parser.add_argument("-p", "--prefix", dest="output",
+ help="The output folder.")
+parser.add_argument("-o", "--firstfile", help="The first file name")
+parser.add_argument("-t", "--secondfile", help="The second file name")
+parser.add_argument("--version", action="version", version='%(prog)s Version 2.0',
+ help="Print debug information.")
+
+group = parser.add_mutually_exclusive_group()
+group.add_argument("-v", "--verbose", action="store_true",
+ help="Print debug information.")
+group.add_argument("-q", "--quiet", action="store_true",
+ help="Disable all messages except fatal errors")
+
+SizeDict = {
+ "K": 1024,
+ "M": 1024*1024,
+ "G": 1024*1024*1024
+}
+
+
+def GetPositionValue(position):
+ '''
+ Parse the string of the argument position and return a decimal number.
+ The valid position formats are
+ 1. HEX
+ e.g. 0x1000 or 0X1000
+ 2. Decimal
+ e.g. 100
+ 3. Decimal[KMG]
+ e.g. 100K or 100M or 100G or 100k or 100m or 100g
+ '''
+ logger = logging.getLogger('Split')
+ PosVal = 0
+ header = position[:2].upper()
+ tailer = position[-1].upper()
+
+ try:
+ if tailer in SizeDict:
+ PosVal = int(position[:-1]) * SizeDict[tailer]
+ else:
+ if header == "0X":
+ PosVal = int(position, 16)
+ else:
+ PosVal = int(position)
+ except Exception as e:
+ logger.error(
+ "The parameter %s format is incorrect. The valid formats are HEX, Decimal, and Decimal[KMG]." % position)
+ raise(e)
+
+ return PosVal
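+
+# For example (illustrative): GetPositionValue('0x1000') returns 4096, and
+# GetPositionValue('100K') returns 100 * 1024 = 102400.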
+
+
+def getFileSize(filename):
+ '''
+ Read the input file and return the file size.
+ '''
+ logger = logging.getLogger('Split')
+ length = 0
+ try:
+ with open(filename, "rb") as fin:
+ fin.seek(0, io.SEEK_END)
+ length = fin.tell()
+ except Exception as e:
+ logger.error("Access file failed: %s", filename)
+ raise(e)
+
+ return length
+
+def getoutputfileabs(inputfile, prefix, outputfile, index):
+ inputfile = os.path.abspath(inputfile)
+ if outputfile is None:
+ if prefix is None:
+ outputfileabs = os.path.join(os.path.dirname(inputfile), "{}{}".format(os.path.basename(inputfile),index))
+ else:
+ if os.path.isabs(prefix):
+ outputfileabs = os.path.join(prefix, "{}{}".format(os.path.basename(inputfile),index))
+ else:
+ outputfileabs = os.path.join(os.getcwd(), prefix, "{}{}".format(os.path.basename(inputfile),index))
+ elif not os.path.isabs(outputfile):
+ if prefix is None:
+ outputfileabs = os.path.join(os.getcwd(), outputfile)
+ else:
+ if os.path.isabs(prefix):
+ outputfileabs = os.path.join(prefix, outputfile)
+ else:
+ outputfileabs = os.path.join(os.getcwd(), prefix, outputfile)
+ else:
+ outputfileabs = outputfile
+ return outputfileabs
+
+def splitFile(inputfile, position, outputdir=None, outputfile1=None, outputfile2=None):
+ '''
+ Split the inputfile into outputfile1 and outputfile2 from the position.
+ '''
+ logger = logging.getLogger('Split')
+
+ if not os.path.exists(inputfile):
+ logger.error("File Not Found: %s" % inputfile)
+ raise(Exception)
+
+ if outputfile1 and outputfile2 and outputfile1 == outputfile2:
+ logger.error(
+ "The firstfile and the secondfile can't be the same: %s" % outputfile1)
+ raise(Exception)
+
+ # Create dir for the output files
+ try:
+
+ outputfile1 = getoutputfileabs(inputfile, outputdir, outputfile1, 1)
+ outputfolder = os.path.dirname(outputfile1)
+ if not os.path.exists(outputfolder):
+ os.makedirs(outputfolder)
+
+ outputfile2 = getoutputfileabs(inputfile, outputdir, outputfile2, 2)
+ outputfolder = os.path.dirname(outputfile2)
+ if not os.path.exists(outputfolder):
+ os.makedirs(outputfolder)
+
+ except Exception as e:
+ logger.error("Can't make dir: %s" % outputfolder)
+ raise(e)
+
+ if position <= 0:
+ if outputfile2 != os.path.abspath(inputfile):
+ shutil.copy2(os.path.abspath(inputfile), outputfile2)
+ with open(outputfile1, "wb") as fout:
+ fout.write(b'')
+ else:
+ inputfilesize = getFileSize(inputfile)
+ if position >= inputfilesize:
+ if outputfile1 != os.path.abspath(inputfile):
+ shutil.copy2(os.path.abspath(inputfile), outputfile1)
+ with open(outputfile2, "wb") as fout:
+ fout.write(b'')
+ else:
+ try:
+ tempdir = tempfile.mkdtemp()
+ tempfile1 = os.path.join(tempdir, "file1.bin")
+ tempfile2 = os.path.join(tempdir, "file2.bin")
+ with open(inputfile, "rb") as fin:
+ content1 = fin.read(position)
+ with open(tempfile1, "wb") as fout1:
+ fout1.write(content1)
+
+ content2 = fin.read(inputfilesize - position)
+ with open(tempfile2, "wb") as fout2:
+ fout2.write(content2)
+ shutil.copy2(tempfile1, outputfile1)
+ shutil.copy2(tempfile2, outputfile2)
+ except Exception as e:
+ logger.error("Split file failed")
+ raise(e)
+ finally:
+ if os.path.exists(tempdir):
+ shutil.rmtree(tempdir)
+
+
+def main():
+ args = parser.parse_args()
+ status = 0
+
+ logger = logging.getLogger('Split')
+ if args.quiet:
+ logger.setLevel(logging.CRITICAL)
+ if args.verbose:
+ logger.setLevel(logging.DEBUG)
+
+ lh = logging.StreamHandler(sys.stdout)
+ lf = logging.Formatter("%(levelname)-8s: %(message)s")
+ lh.setFormatter(lf)
+ logger.addHandler(lh)
+
+ try:
+ position = GetPositionValue(args.position)
+ splitFile(args.inputfile, position, args.output,
+ args.firstfile, args.secondfile)
+ except Exception as e:
+ status = 1
+
+ return status
+
+
+if __name__ == "__main__":
+ exit(main())
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/__init__.py
new file mode 100644
index 00000000..c05ffbbd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Split/__init__.py
@@ -0,0 +1,10 @@
+# @file
+# Split a file into two pieces at the request offset.
+#
+# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+# Import Modules
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/Table.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/Table.py
new file mode 100755
index 00000000..3d6e3c6f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/Table.py
@@ -0,0 +1,114 @@
+## @file
+# This file is used to create/update/query/erase a common table
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+
+## Table
+#
+# This class defines a common table
+#
+# @param object: Inherited from object class
+#
+# @param Cursor: Cursor of the database
+# @param TableName: Name of the table
+#
+class Table(object):
+ def __init__(self, Cursor):
+ self.Cur = Cursor
+ self.Table = ''
+ self.ID = 0
+
+ ## Create table
+ #
+ # Create a table
+ #
+ def Create(self, SqlCommand):
+ self.Cur.execute(SqlCommand)
+ self.ID = 0
+ EdkLogger.verbose(SqlCommand + " ... DONE!")
+
+ ## Insert table
+ #
+ # Insert a record into a table
+ #
+ def Insert(self, SqlCommand):
+ self.Exec(SqlCommand)
+
+ ## Query table
+ #
+ # Query all records of the table
+ #
+ def Query(self):
+ EdkLogger.verbose("\nQuery table %s started ..." % self.Table)
+ SqlCommand = """select * from %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+ for Rs in self.Cur:
+ EdkLogger.verbose(str(Rs))
+
+ TotalCount = self.GetCount()
+ EdkLogger.verbose("*** Total %s records in table %s ***" % (TotalCount, self.Table) )
+ EdkLogger.verbose("Query tabel %s DONE!" % self.Table)
+
+ ## Drop a table
+ #
+ # Drop the table
+ #
+ def Drop(self):
+ SqlCommand = """drop table IF EXISTS %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+ EdkLogger.verbose("Drop tabel %s ... DONE!" % self.Table)
+
+ ## Get count
+ #
+ # Get a count of all records of the table
+ #
+ # @retval Count: Total count of all records
+ #
+ def GetCount(self):
+ SqlCommand = """select count(ID) from %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+ for Item in self.Cur:
+ return Item[0]
+
+ ## Generate ID
+ #
+ # Generate an ID if input ID is -1
+ #
+ # @param ID: Input ID
+ #
+ # @retval ID: New generated ID
+ #
+ def GenerateID(self, ID):
+ if ID == -1:
+ self.ID = self.ID + 1
+
+ return self.ID
+
+ ## Init the ID of the table
+ #
+ # Init the ID of the table
+ #
+ def InitID(self):
+ self.ID = self.GetCount()
+
+ ## Exec
+ #
+ # Exec Sql Command, return result
+ #
+ # @param SqlCommand: The SqlCommand to be executed
+ #
+ # @retval RecordSet: The result after executed
+ #
+ def Exec(self, SqlCommand):
+ EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
+ self.Cur.execute(SqlCommand)
+ RecordSet = self.Cur.fetchall()
+ EdkLogger.debug(4, "RecordSet: %s" % RecordSet)
+ return RecordSet
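+
+# Example usage (a minimal sketch, assuming a sqlite3 cursor; the table name and
+# schema are placeholders):
+#   import sqlite3
+#   Cursor = sqlite3.connect(':memory:').cursor()
+#   MyTable = Table(Cursor)
+#   MyTable.Table = 'Demo'
+#   MyTable.Create("create table IF NOT EXISTS Demo (ID INTEGER PRIMARY KEY)")
+#   MyTable.InitID()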
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDataModel.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDataModel.py
new file mode 100755
index 00000000..fa8b1dfa
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDataModel.py
@@ -0,0 +1,90 @@
+## @file
+# This file is used to create/update/query/erase table for data models
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TableDataModel
+#
+# This class defines a table used for data models
+#
+# @param object: Inherited from object class
+#
+#
+class TableDataModel(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'DataModel'
+
+ ## Create table
+ #
+ # Create table DataModel
+ #
+ # @param ID: ID of a ModelType
+ # @param CrossIndex: CrossIndex of a ModelType
+ # @param Name: Name of a ModelType
+ # @param Description: Description of a ModelType
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ CrossIndex INTEGER NOT NULL,
+ Name VARCHAR NOT NULL,
+ Description VARCHAR
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table DataModel
+ #
+ # @param ID: ID of a ModelType
+ # @param CrossIndex: CrossIndex of a ModelType
+ # @param Name: Name of a ModelType
+ # @param Description: Description of a ModelType
+ #
+ def Insert(self, CrossIndex, Name, Description):
+ self.ID = self.ID + 1
+ (Name, Description) = ConvertToSqlString((Name, Description))
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s')""" % (self.Table, self.ID, CrossIndex, Name, Description)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## Init table
+ #
+ # Create all default records of table DataModel
+ #
+ def InitTable(self):
+ EdkLogger.verbose("\nInitialize table DataModel started ...")
+ for Item in DataClass.MODEL_LIST:
+ CrossIndex = Item[1]
+ Name = Item[0]
+ Description = Item[0]
+ self.Insert(CrossIndex, Name, Description)
+ EdkLogger.verbose("Initialize table DataModel ... DONE!")
+
+ ## Get CrossIndex
+ #
+ # Get a model's cross index from its name
+ #
+ # @param ModelName: Name of the model
+ # @retval CrossIndex: CrossIndex of the model
+ #
+ def GetCrossIndex(self, ModelName):
+ CrossIndex = -1
+ SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
+ self.Cur.execute(SqlCommand)
+ for Item in self.Cur:
+ CrossIndex = Item[0]
+
+ return CrossIndex
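+
+# Example usage (a minimal sketch; Cursor is assumed to be a sqlite3 cursor, and the
+# model name must exist in DataClass.MODEL_LIST):
+#   DataModelTable = TableDataModel(Cursor)
+#   DataModelTable.Create()
+#   DataModelTable.InitTable()
+#   CrossIndex = DataModelTable.GetCrossIndex('MODEL_FILE_C')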
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDec.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDec.py
new file mode 100755
index 00000000..b6297a3d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDec.py
@@ -0,0 +1,103 @@
+## @file
+# This file is used to create/update/query/erase table for DEC data
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TableDec
+#
+# This class defines a table used for DEC data
+#
+# @param object: Inherited from object class
+#
+#
+class TableDec(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Dec'
+
+ ## Create table
+ #
+ # Create table Dec
+ #
+ # @param ID: ID of a Dec item
+ # @param Model: Model of a Dec item
+ # @param Value1: Value1 of a Dec item
+ # @param Value2: Value2 of a Dec item
+ # @param Value3: Value3 of a Dec item
+ # @param Arch: Arch of a Dec item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param BelongsToFile: The item belongs to which dsc file
+ # @param StartLine: StartLine of a Dec item
+ # @param StartColumn: StartColumn of a Dec item
+ # @param EndLine: EndLine of a Dec item
+ # @param EndColumn: EndColumn of a Dec item
+ # @param Enabled: If this item enabled
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 VARCHAR NOT NULL,
+ Value2 VARCHAR,
+ Value3 VARCHAR,
+ Arch VarCHAR,
+ BelongsToItem SINGLE NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Dec
+ #
+ # @param ID: ID of a Dec item
+ # @param Model: Model of a Dec item
+ # @param Value1: Value1 of a Dec item
+ # @param Value2: Value2 of a Dec item
+ # @param Value3: Value3 of a Dec item
+ # @param Arch: Arch of a Dec item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param BelongsToFile: The item belongs to which dsc file
+ # @param StartLine: StartLine of a Dec item
+ # @param StartColumn: StartColumn of a Dec item
+ # @param EndLine: EndLine of a Dec item
+ # @param EndColumn: EndColumn of a Dec item
+ # @param Enabled: If this item enabled
+ #
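+ # Note: Value4 and Value5 are accepted by this signature but are not stored in the table.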
+ def Insert(self, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
+ self.ID = self.ID + 1
+ (Value1, Value2, Value3, Arch) = ConvertToSqlString((Value1, Value2, Value3, Arch))
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model):
+ SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
+ where Model = %s
+ and Enabled > -1""" % (self.Table, Model)
+ EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
+ self.Cur.execute(SqlCommand)
+ return self.Cur.fetchall()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDsc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDsc.py
new file mode 100755
index 00000000..8cca2db6
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableDsc.py
@@ -0,0 +1,103 @@
+## @file
+# This file is used to create/update/query/erase table for DSC data
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TableDsc
+#
+# This class defines a table used for DSC data
+#
+# @param object: Inherited from object class
+#
+#
+class TableDsc(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Dsc'
+
+ ## Create table
+ #
+ # Create table Dsc
+ #
+ # @param ID: ID of a Dsc item
+ # @param Model: Model of a Dsc item
+ # @param Value1: Value1 of a Dsc item
+ # @param Value2: Value2 of a Dsc item
+ # @param Value3: Value3 of a Dsc item
+ # @param Arch: Arch of a Dsc item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param BelongsToFile: The item belongs to which dsc file
+ # @param StartLine: StartLine of a Dsc item
+ # @param StartColumn: StartColumn of a Dsc item
+ # @param EndLine: EndLine of a Dsc item
+ # @param EndColumn: EndColumn of a Dsc item
+ # @param Enabled: If this item enabled
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 VARCHAR NOT NULL,
+ Value2 VARCHAR,
+ Value3 VARCHAR,
+ Arch VarCHAR,
+ BelongsToItem SINGLE NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Dsc
+ #
+ # @param ID: ID of a Dsc item
+ # @param Model: Model of a Dsc item
+ # @param Value1: Value1 of a Dsc item
+ # @param Value2: Value2 of a Dsc item
+ # @param Value3: Value3 of a Dsc item
+ # @param Arch: Arch of a Dsc item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param BelongsToFile: The item belongs to which dsc file
+ # @param StartLine: StartLine of a Dsc item
+ # @param StartColumn: StartColumn of a Dsc item
+ # @param EndLine: EndLine of a Dsc item
+ # @param EndColumn: EndColumn of a Dsc item
+ # @param Enabled: If this item enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
+ self.ID = self.ID + 1
+ (Value1, Value2, Value3, Arch) = ConvertToSqlString((Value1, Value2, Value3, Arch))
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model):
+ SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
+ where Model = %s
+ and Enabled > -1""" % (self.Table, Model)
+ EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
+ self.Cur.execute(SqlCommand)
+ return self.Cur.fetchall()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableEotReport.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableEotReport.py
new file mode 100755
index 00000000..c54b6c62
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableEotReport.py
@@ -0,0 +1,71 @@
+## @file
+# This file is used to create/update/query/erase table for EOT reports
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import Common.LongFilePathOs as os, time
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString2
+import Eot.EotToolError as EotToolError
+import Eot.EotGlobalData as EotGlobalData
+
+## TableEotReport
+#
+# This class defines a table used for EOT reports
+#
+# @param object: Inherited from object class
+#
+#
+class TableEotReport(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Report'
+
+ ## Create table
+ #
+ # Create table report
+ #
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ ModuleID INTEGER DEFAULT -1,
+ ModuleName TEXT DEFAULT '',
+ ModuleGuid TEXT DEFAULT '',
+ SourceFileID INTEGER DEFAULT -1,
+ SourceFileFullPath TEXT DEFAULT '',
+ ItemName TEXT DEFAULT '',
+ ItemType TEXT DEFAULT '',
+ ItemMode TEXT DEFAULT '',
+ GuidName TEXT DEFAULT '',
+ GuidMacro TEXT DEFAULT '',
+ GuidValue TEXT DEFAULT '',
+ BelongsToFunction TEXT DEFAULT '',
+ Enabled INTEGER DEFAULT 0
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table report
+ #
+ #
+ def Insert(self, ModuleID = -1, ModuleName = '', ModuleGuid = '', SourceFileID = -1, SourceFileFullPath = '', \
+ ItemName = '', ItemType = '', ItemMode = '', GuidName = '', GuidMacro = '', GuidValue = '', BelongsToFunction = '', Enabled = 0):
+ self.ID = self.ID + 1
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s)""" \
+ % (self.Table, self.ID, ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, \
+ ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled)
+ Table.Insert(self, SqlCommand)
+
+ def GetMaxID(self):
+ SqlCommand = """select max(ID) from %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+ for Item in self.Cur:
+ return Item[0]
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFdf.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFdf.py
new file mode 100755
index 00000000..14b8919f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFdf.py
@@ -0,0 +1,104 @@
+## @file
+# This file is used to create/update/query/erase table for FDF data
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TableFdf
+#
+# This class defines a table used for FDF data
+#
+# @param object: Inherited from object class
+#
+#
+class TableFdf(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Fdf'
+
+ ## Create table
+ #
+ # Create table Fdf
+ #
+ # @param ID: ID of a Fdf item
+ # @param Model: Model of a Fdf item
+ # @param Value1: Value1 of a Fdf item
+ # @param Value2: Value2 of a Fdf item
+ # @param Value3: Value3 of a Fdf item
+ # @param Arch: Arch of a Fdf item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param BelongsToFile: The item belongs to which fdf file
+ # @param StartLine: StartLine of a Fdf item
+ # @param StartColumn: StartColumn of a Fdf item
+ # @param EndLine: EndLine of a Fdf item
+ # @param EndColumn: EndColumn of a Fdf item
+ # @param Enabled: If this item enabled
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 VARCHAR NOT NULL,
+ Value2 VARCHAR,
+ Value3 VARCHAR,
+ Scope1 VarCHAR,
+ Scope2 VarCHAR,
+ BelongsToItem SINGLE NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Fdf
+ #
+ # @param ID: ID of a Fdf item
+ # @param Model: Model of a Fdf item
+ # @param Value1: Value1 of a Fdf item
+ # @param Value2: Value2 of a Fdf item
+ # @param Value3: Value3 of a Fdf item
+ # @param Arch: Arch of a Fdf item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param BelongsToFile: The item belongs to which fdf file
+ # @param StartLine: StartLine of a Fdf item
+ # @param StartColumn: StartColumn of a Fdf item
+ # @param EndLine: EndLine of a Fdf item
+ # @param EndColumn: EndColumn of a Fdf item
+ # @param Enabled: If this item enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
+ self.ID = self.ID + 1
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model):
+ SqlCommand = """select ID, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine from %s
+ where Model = %s
+ and Enabled > -1""" % (self.Table, Model)
+ EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
+ self.Cur.execute(SqlCommand)
+ return self.Cur.fetchall()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFile.py
new file mode 100755
index 00000000..a2442288
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFile.py
@@ -0,0 +1,99 @@
+## @file
+# This file is used to create/update/query/erase table for files
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+import Common.LongFilePathOs as os
+from CommonDataClass.DataClass import FileClass
+
+## TableFile
+#
+# This class defines a table used for files
+#
+# @param object: Inherited from object class
+#
+class TableFile(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'File'
+
+ ## Create table
+ #
+ # Create table File
+ #
+ # @param ID: ID of a File
+ # @param Name: Name of a File
+ # @param ExtName: ExtName of a File
+ # @param Path: Path of a File
+ # @param FullPath: FullPath of a File
+ # @param Model: Model of a File
+ # @param TimeStamp: TimeStamp of a File
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ Name VARCHAR NOT NULL,
+ ExtName VARCHAR,
+ Path VARCHAR,
+ FullPath VARCHAR NOT NULL,
+ Model INTEGER DEFAULT 0,
+ TimeStamp VARCHAR NOT NULL
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table File
+ #
+ # @param ID: ID of a File
+ # @param Name: Name of a File
+ # @param ExtName: ExtName of a File
+ # @param Path: Path of a File
+ # @param FullPath: FullPath of a File
+ # @param Model: Model of a File
+ # @param TimeStamp: TimeStamp of a File
+ #
+ def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp):
+ self.ID = self.ID + 1
+ (Name, ExtName, Path, FullPath) = ConvertToSqlString((Name, ExtName, Path, FullPath))
+ SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, '%s')""" \
+ % (self.Table, self.ID, Name, ExtName, Path, FullPath, Model, TimeStamp)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## InsertFile
+ #
+ # Insert one file to table
+ #
+ # @param FileFullPath: The full path of the file
+ # @param Model: The model of the file
+ #
+ # @retval FileID: The ID after record is inserted
+ #
+ def InsertFile(self, FileFullPath, Model):
+ (Filepath, Name) = os.path.split(FileFullPath)
+ (Root, Ext) = os.path.splitext(FileFullPath)
+ TimeStamp = os.stat(FileFullPath)[8]
+ File = FileClass(-1, Name, Ext, Filepath, FileFullPath, Model, '', [], [], [])
+ return self.Insert(File.Name, File.ExtName, File.Path, File.FullPath, File.Model, TimeStamp)
+
+ ## Get ID of a given file
+ #
+ # @param File: Path of the file
+ #
+ # @retval ID: ID value of the given file in the table
+ #
+ def GetFileId(self, File):
+ QueryScript = "select ID from %s where FullPath = '%s'" % (self.Table, str(File))
+ RecordList = self.Exec(QueryScript)
+ if len(RecordList) == 0:
+ return None
+ return RecordList[0][0]
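+
+# Example usage (a minimal sketch; the path and Model value are placeholders, and the
+# file must exist because InsertFile reads its timestamp via os.stat):
+#   FileTable = TableFile(Cursor)
+#   FileTable.Create()
+#   FileID = FileTable.InsertFile('C:/Tree/Module.c', Model)
+#   assert FileTable.GetFileId('C:/Tree/Module.c') == FileID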
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFunction.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFunction.py
new file mode 100755
index 00000000..4e5ab4d9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableFunction.py
@@ -0,0 +1,90 @@
+## @file
+# This file is used to create/update/query/erase table for functions
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TableFunction
+#
+# This class defines a table used for functions
+#
+# @param Table: Inherited from Table class
+#
+class TableFunction(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Function'
+
+ ## Create table
+ #
+ # Create table Function
+ #
+ # @param ID: ID of a Function
+ # @param Header: Header of a Function
+ # @param Modifier: Modifier of a Function
+ # @param Name: Name of a Function
+ # @param ReturnStatement: ReturnStatement of a Function
+ # @param StartLine: StartLine of a Function
+ # @param StartColumn: StartColumn of a Function
+ # @param EndLine: EndLine of a Function
+ # @param EndColumn: EndColumn of a Function
+ # @param BodyStartLine: StartLine of a Function body
+ # @param BodyStartColumn: StartColumn of a Function body
+ # @param BelongsToFile: The Function belongs to which file
+ # @param FunNameStartLine: StartLine of a Function name
+ # @param FunNameStartColumn: StartColumn of a Function name
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ Header TEXT,
+ Modifier VARCHAR,
+ Name VARCHAR NOT NULL,
+ ReturnStatement VARCHAR,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ BodyStartLine INTEGER NOT NULL,
+ BodyStartColumn INTEGER NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ FunNameStartLine INTEGER NOT NULL,
+ FunNameStartColumn INTEGER NOT NULL
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Function
+ #
+ # @param ID: ID of a Function
+ # @param Header: Header of a Function
+ # @param Modifier: Modifier of a Function
+ # @param Name: Name of a Function
+ # @param ReturnStatement: ReturnStatement of a Function
+ # @param StartLine: StartLine of a Function
+ # @param StartColumn: StartColumn of a Function
+ # @param EndLine: EndLine of a Function
+ # @param EndColumn: EndColumn of a Function
+ # @param BodyStartLine: StartLine of a Function body
+ # @param BodyStartColumn: StartColumn of a Function body
+    # @param BelongsToFile: The file the Function belongs to
+ # @param FunNameStartLine: StartLine of a Function name
+ # @param FunNameStartColumn: StartColumn of a Function name
+ #
+ def Insert(self, Header, Modifier, Name, ReturnStatement, StartLine, StartColumn, EndLine, EndColumn, BodyStartLine, BodyStartColumn, BelongsToFile, FunNameStartLine, FunNameStartColumn):
+ self.ID = self.ID + 1
+ (Header, Modifier, Name, ReturnStatement) = ConvertToSqlString((Header, Modifier, Name, ReturnStatement))
+ SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, Header, Modifier, Name, ReturnStatement, StartLine, StartColumn, EndLine, EndColumn, BodyStartLine, BodyStartColumn, BelongsToFile, FunNameStartLine, FunNameStartColumn)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
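A matching sketch for the Function table just defined, reusing the illustrative Cursor and FileId from the sketch above; the keyword arguments simply name the positional parameters of Insert():

    FuncTable = TableFunction(Cursor)
    FuncTable.Create()
    FuncId = FuncTable.Insert(
        Header='/** Adds two integers */', Modifier='STATIC', Name='Add',
        ReturnStatement='return A + B;',
        StartLine=10, StartColumn=1, EndLine=14, EndColumn=2,
        BodyStartLine=11, BodyStartColumn=1,
        BelongsToFile=FileId,
        FunNameStartLine=10, FunNameStartColumn=8)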
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableIdentifier.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableIdentifier.py
new file mode 100755
index 00000000..97300361
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableIdentifier.py
@@ -0,0 +1,85 @@
+## @file
+# This file is used to create/update/query/erase table for Identifiers
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Common.StringUtils import ConvertToSqlString
+from Table.Table import Table
+
+## TableIdentifier
+#
+# This class defines the table used for Identifiers
+#
+# @param Table: Inherited from Table class
+#
+#
+class TableIdentifier(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Identifier'
+
+ ## Create table
+ #
+ # Create table Identifier
+ #
+    # @param ID: ID of an Identifier
+    # @param Modifier: Modifier of an Identifier
+    # @param Type: Type of an Identifier
+    # @param Name: Name of an Identifier
+    # @param Value: Value of an Identifier
+    # @param Model: Model of an Identifier
+    # @param BelongsToFile: The file the Identifier belongs to
+    # @param BelongsToFunction: The function the Identifier belongs to
+    # @param StartLine: StartLine of an Identifier
+    # @param StartColumn: StartColumn of an Identifier
+    # @param EndLine: EndLine of an Identifier
+    # @param EndColumn: EndColumn of an Identifier
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s(ID INTEGER PRIMARY KEY,
+ Modifier VARCHAR,
+ Type VARCHAR,
+ Name VARCHAR NOT NULL,
+ Value VARCHAR NOT NULL,
+ Model INTEGER NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ BelongsToFunction SINGLE DEFAULT -1,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Identifier
+ #
+    # @param ID: ID of an Identifier
+    # @param Modifier: Modifier of an Identifier
+    # @param Type: Type of an Identifier
+    # @param Name: Name of an Identifier
+    # @param Value: Value of an Identifier
+    # @param Model: Model of an Identifier
+    # @param BelongsToFile: The file the Identifier belongs to
+    # @param BelongsToFunction: The function the Identifier belongs to
+    # @param StartLine: StartLine of an Identifier
+    # @param StartColumn: StartColumn of an Identifier
+    # @param EndLine: EndLine of an Identifier
+    # @param EndColumn: EndColumn of an Identifier
+ #
+ def Insert(self, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn):
+ self.ID = self.ID + 1
+ (Modifier, Type, Name, Value) = ConvertToSqlString((Modifier, Type, Name, Value))
+ SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
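Insert() above splices its string arguments directly into the SQL text; that only stays well-formed because ConvertToSqlString doubles embedded single quotes first. A one-line sketch of the escaping (the helper lives in Common/StringUtils.py; the behavior shown is inferred from how it is used here):

    Values = ("don't care", "won't match")
    Escaped = tuple(V.replace("'", "''") for V in Values)    # what ConvertToSqlString does
    # ("don''t care", "won''t match") -- now safe inside '...' SQL literals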
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableInf.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableInf.py
new file mode 100755
index 00000000..3c5c0e1d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableInf.py
@@ -0,0 +1,109 @@
+## @file
+# This file is used to create/update/query/erase table for INF data
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TableInf
+#
+# This class defines the table used for INF data
+#
+# @param Table: Inherited from Table class
+#
+#
+class TableInf(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Inf'
+
+ ## Create table
+ #
+ # Create table Inf
+ #
+    # @param ID: ID of an INF item
+    # @param Model: Model of an INF item
+    # @param Value1: Value1 of an INF item
+    # @param Value2: Value2 of an INF item
+    # @param Value3: Value3 of an INF item
+    # @param Value4: Value4 of an INF item
+    # @param Value5: Value5 of an INF item
+    # @param Arch: Arch of an INF item
+    # @param BelongsToItem: The other item this item belongs to
+    # @param BelongsToFile: The INF file this item belongs to
+    # @param StartLine: StartLine of an INF item
+    # @param StartColumn: StartColumn of an INF item
+    # @param EndLine: EndLine of an INF item
+    # @param EndColumn: EndColumn of an INF item
+    # @param Enabled: Whether this item is enabled
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 VARCHAR NOT NULL,
+ Value2 VARCHAR,
+ Value3 VARCHAR,
+ Value4 VARCHAR,
+ Value5 VARCHAR,
+                                                       Arch VARCHAR,
+ BelongsToItem SINGLE NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Inf
+ #
+    # @param ID: ID of an INF item
+    # @param Model: Model of an INF item
+    # @param Value1: Value1 of an INF item
+    # @param Value2: Value2 of an INF item
+    # @param Value3: Value3 of an INF item
+    # @param Value4: Value4 of an INF item
+    # @param Value5: Value5 of an INF item
+    # @param Arch: Arch of an INF item
+    # @param BelongsToItem: The other item this item belongs to
+    # @param BelongsToFile: The INF file this item belongs to
+    # @param StartLine: StartLine of an INF item
+    # @param StartColumn: StartColumn of an INF item
+    # @param EndLine: EndLine of an INF item
+    # @param EndColumn: EndColumn of an INF item
+    # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
+ self.ID = self.ID + 1
+ (Value1, Value2, Value3, Value4, Value5, Arch) = ConvertToSqlString((Value1, Value2, Value3, Value4, Value5, Arch))
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model):
+ SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
+ where Model = %s
+ and Enabled > -1""" % (self.Table, Model)
+ EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
+ self.Cur.execute(SqlCommand)
+ return self.Cur.fetchall()
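Query() filters on the Model column, whose values are the MODEL_* constants from CommonDataClass.DataClass imported at the top of this file. An illustrative loop over one such model type, reusing the sketch cursor from earlier:

    InfTable = TableInf(Cursor)
    InfTable.Create()
    for Id, V1, V2, V3, Arch, Item, File, Line in InfTable.Query(DataClass.MODEL_META_DATA_HEADER):
        print("row %s: %s (%s) at line %s" % (Id, V1, Arch, Line))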
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TablePcd.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TablePcd.py
new file mode 100755
index 00000000..22d40fc4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TablePcd.py
@@ -0,0 +1,85 @@
+## @file
+# This file is used to create/update/query/erase table for pcds
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString
+
+## TablePcd
+#
+# This class defines the table used for PCDs
+#
+# @param Table: Inherited from Table class
+#
+#
+class TablePcd(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Pcd'
+
+ ## Create table
+ #
+ # Create table Pcd
+ #
+ # @param ID: ID of a Pcd
+ # @param CName: CName of a Pcd
+ # @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
+ # @param Token: Token of a Pcd
+ # @param DatumType: DatumType of a Pcd
+ # @param Model: Model of a Pcd
+    # @param BelongsToFile: The file the Pcd belongs to
+    # @param BelongsToFunction: The function the Pcd belongs to
+ # @param StartLine: StartLine of a Pcd
+ # @param StartColumn: StartColumn of a Pcd
+ # @param EndLine: EndLine of a Pcd
+ # @param EndColumn: EndColumn of a Pcd
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ CName VARCHAR NOT NULL,
+ TokenSpaceGuidCName VARCHAR NOT NULL,
+ Token INTEGER,
+ DatumType VARCHAR,
+ Model INTEGER NOT NULL,
+ BelongsToFile SINGLE NOT NULL,
+ BelongsToFunction SINGLE DEFAULT -1,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Pcd
+ #
+ # @param ID: ID of a Pcd
+ # @param CName: CName of a Pcd
+ # @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
+ # @param Token: Token of a Pcd
+ # @param DatumType: DatumType of a Pcd
+ # @param Model: Model of a Pcd
+    # @param BelongsToFile: The file the Pcd belongs to
+    # @param BelongsToFunction: The function the Pcd belongs to
+ # @param StartLine: StartLine of a Pcd
+ # @param StartColumn: StartColumn of a Pcd
+ # @param EndLine: EndLine of a Pcd
+ # @param EndColumn: EndColumn of a Pcd
+ #
+ def Insert(self, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn):
+ self.ID = self.ID + 1
+ (CName, TokenSpaceGuidCName, DatumType) = ConvertToSqlString((CName, TokenSpaceGuidCName, DatumType))
+ SqlCommand = """insert into %s values(%s, '%s', '%s', %s, '%s', %s, %s, %s, %s, %s, %s, %s)""" \
+ % (self.Table, self.ID, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableQuery.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableQuery.py
new file mode 100755
index 00000000..454c61dd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableQuery.py
@@ -0,0 +1,63 @@
+## @file
+# This file is used to create/update/query/erase table for Queries
+#
+# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Common.StringUtils import ConvertToSqlString
+from Table.Table import Table
+
+## TableQuery
+#
+# This class defines the table used for Queries
+#
+# @param Table: Inherited from Table class
+#
+#
+class TableQuery(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Query'
+
+ ## Create table
+ #
+ # Create table Query
+ #
+ # @param ID: ID of a Query
+ # @param Name: Name of a Query
+ # @param Modifier: Modifier of a Query
+    # @param Value: Value of a Query
+ # @param Model: Model of a Query
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s(ID INTEGER PRIMARY KEY,
+ Name TEXT DEFAULT '',
+ Modifier TEXT DEFAULT '',
+ Value TEXT DEFAULT '',
+ Model INTEGER DEFAULT 0
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table Query
+ #
+ # @param ID: ID of a Query
+ # @param Name: Name of a Query
+ # @param Modifier: Modifier of a Query
+ # @param Value: Value of a Query
+ # @param Model: Model of a Query
+ #
+ def Insert(self, Name, Modifier, Value, Model):
+        self.ID = self.ID + 1
+        (Name, Modifier, Value) = ConvertToSqlString((Name, Modifier, Value))
+        SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', %s)""" \
+                     % (self.Table, self.ID, Name, Modifier, Value, Model)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableReport.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableReport.py
new file mode 100755
index 00000000..e9d1c87a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/TableReport.py
@@ -0,0 +1,127 @@
+## @file
+# This file is used to create/update/query/erase table for ECC reports
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+import Common.LongFilePathOs as os, time
+from Table.Table import Table
+from Common.StringUtils import ConvertToSqlString2
+import Ecc.EccToolError as EccToolError
+import Ecc.EccGlobalData as EccGlobalData
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+## TableReport
+#
+# This class defines the table used for ECC reports
+#
+# @param Table: Inherited from Table class
+#
+#
+class TableReport(Table):
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor)
+ self.Table = 'Report'
+
+ ## Create table
+ #
+ # Create table report
+ #
+ # @param ID: ID of an Error
+    # @param ErrorID: ID of the error type of a report item
+    # @param OtherMsg: Other error message besides the standard error message
+    # @param BelongsToTable: The table the error item belongs to
+    # @param BelongsToItem: The item the error belongs to
+    # @param Enabled: Whether this error is enabled
+    # @param Corrected: Whether this error has been corrected
+ #
+ def Create(self):
+ SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
+ ErrorID INTEGER NOT NULL,
+ OtherMsg TEXT,
+ BelongsToTable TEXT NOT NULL,
+ BelongsToItem SINGLE NOT NULL,
+ Enabled INTEGER DEFAULT 0,
+ Corrected INTEGER DEFAULT -1
+ )""" % self.Table
+ Table.Create(self, SqlCommand)
+
+ ## Insert table
+ #
+ # Insert a record into table report
+ #
+ # @param ID: ID of an Error
+    # @param ErrorID: ID of the error type of a report item
+    # @param OtherMsg: Other error message besides the standard error message
+    # @param BelongsToTable: The table the error item belongs to
+    # @param BelongsToItem: The item the error belongs to
+    # @param Enabled: Whether this error is enabled
+    # @param Corrected: Whether this error has been corrected
+ #
+ def Insert(self, ErrorID, OtherMsg='', BelongsToTable='', BelongsToItem= -1, Enabled=0, Corrected= -1):
+ self.ID = self.ID + 1
+ SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \
+ % (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected)
+ Table.Insert(self, SqlCommand)
+
+ return self.ID
+
+ ## Query table
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self):
+ SqlCommand = """select ID, ErrorID, OtherMsg, BelongsToTable, BelongsToItem, Corrected from %s
+ where Enabled > -1 order by ErrorID, BelongsToItem""" % (self.Table)
+ return self.Exec(SqlCommand)
+
+ ## Update table
+ #
+ def UpdateBelongsToItemByFile(self, ItemID=-1, File=""):
+ SqlCommand = """update Report set BelongsToItem=%s where BelongsToTable='File' and BelongsToItem=-2
+ and OtherMsg like '%%%s%%'""" % (ItemID, File)
+ return self.Exec(SqlCommand)
+
+ ## Convert to CSV
+ #
+ # Get all enabled records from table report and save them to a .csv file
+ #
+    # @param Filename: The file name used to save the report content
+ #
+ def ToCSV(self, Filename='Report.csv'):
+ try:
+ File = open(Filename, 'w+')
+ File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")
+ RecordSet = self.Query()
+ Index = 0
+ for Record in RecordSet:
+ Index = Index + 1
+ ErrorID = Record[1]
+ OtherMsg = Record[2]
+ BelongsToTable = Record[3]
+ BelongsToItem = Record[4]
+ IsCorrected = Record[5]
+ SqlCommand = ''
+ if BelongsToTable == 'File':
+ SqlCommand = """select 1, FullPath from %s where ID = %s
+ """ % (BelongsToTable, BelongsToItem)
+ else:
+ SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B
+ where A.ID = %s and B.ID = A.BelongsToFile
+ """ % (BelongsToTable, BelongsToItem)
+ NewRecord = self.Exec(SqlCommand)
+ if NewRecord != []:
+ File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
+ EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))
+
+ File.close()
+ except IOError:
+ NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())
+            EdkLogger.warn("ECC", "The report file %s is locked by another process; using %s instead!" % (Filename, NewFilename))
+ self.ToCSV(NewFilename)
+
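Like its siblings, TableReport builds its statements by %-interpolation, which is tolerable here because every value originates from ECC's own scanner rather than from user input. Where that assumption does not hold, sqlite3 placeholders are the safer equivalent; a sketch of the same Report row written that way (values illustrative):

    Row = (1, 1001, 'other message', 'File', -2, 0, -1)    # ID .. Corrected
    Cursor.execute("insert into Report values(?, ?, ?, ?, ?, ?, ?)", Row)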
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/__init__.py
new file mode 100644
index 00000000..01fd9642
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Table/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Table' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/TargetTool.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/TargetTool.py
new file mode 100755
index 00000000..28801389
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -0,0 +1,254 @@
+## @file
+# Target Tool Parser
+#
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+import Common.LongFilePathOs as os
+import sys
+import traceback
+from optparse import OptionParser
+
+import Common.EdkLogger as EdkLogger
+import Common.BuildToolError as BuildToolError
+from Common.DataType import *
+from Common.BuildVersion import gBUILD_VERSION
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+# TODO: 1. set clean; 2. add item if the line is disabled.
+
+class TargetTool():
+ def __init__(self, opt, args):
+ self.WorkSpace = os.path.normpath(os.getenv('WORKSPACE'))
+ self.Opt = opt
+ self.Arg = args[0]
+ self.FileName = os.path.normpath(os.path.join(self.WorkSpace, 'Conf', 'target.txt'))
+ if os.path.isfile(self.FileName) == False:
+ print("%s does not exist." % self.FileName)
+ sys.exit(1)
+ self.TargetTxtDictionary = {
+ TAB_TAT_DEFINES_ACTIVE_PLATFORM : None,
+ TAB_TAT_DEFINES_TOOL_CHAIN_CONF : None,
+ TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : None,
+ TAB_TAT_DEFINES_TARGET : None,
+ TAB_TAT_DEFINES_TOOL_CHAIN_TAG : None,
+ TAB_TAT_DEFINES_TARGET_ARCH : None,
+ TAB_TAT_DEFINES_BUILD_RULE_CONF : None,
+ }
+ self.LoadTargetTxtFile(self.FileName)
+
+    def LoadTargetTxtFile(self, filename):
+        if os.path.exists(filename) and os.path.isfile(filename):
+            return self.ConvertTextFileToDict(filename, '#', '=')
+        else:
+            raise IOError('LoadTargetTxtFile() : No Target.txt file exists.')
+
+    #
+    # Convert a text file to a dictionary
+    #
+ def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
+ """Convert a text file to a dictionary of (name:value) pairs."""
+ try:
+ f = open(FileName, 'r')
+ for Line in f:
+ if Line.startswith(CommentCharacter) or Line.strip() == '':
+ continue
+ LineList = Line.split(KeySplitCharacter, 1)
+ if len(LineList) >= 2:
+ Key = LineList[0].strip()
+ if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
+ if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM or Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF \
+ or Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER \
+ or Key == TAB_TAT_DEFINES_ACTIVE_MODULE:
+ self.TargetTxtDictionary[Key] = LineList[1].replace('\\', '/').strip()
+ elif Key == TAB_TAT_DEFINES_TARGET or Key == TAB_TAT_DEFINES_TARGET_ARCH \
+ or Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG or Key == TAB_TAT_DEFINES_BUILD_RULE_CONF:
+ self.TargetTxtDictionary[Key] = LineList[1].split()
+ f.close()
+ return 0
+ except:
+ last_type, last_value, last_tb = sys.exc_info()
+ traceback.print_exception(last_type, last_value, last_tb)
+
+ def Print(self):
+ errMsg = ''
+ for Key in self.TargetTxtDictionary:
+ if isinstance(self.TargetTxtDictionary[Key], type([])):
+ print("%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key])))
+ elif self.TargetTxtDictionary[Key] is None:
+ errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
+ else:
+ print("%-30s = %s" % (Key, self.TargetTxtDictionary[Key]))
+
+ if errMsg != '':
+ print(os.linesep + 'Warning:' + os.linesep + errMsg)
+
+ def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
+ try:
+ fr = open(self.FileName, 'r')
+            fw = open(os.path.normpath(os.path.join(self.WorkSpace, 'Conf', 'targetnew.txt')), 'w')
+
+ existKeys = []
+ for Line in fr:
+ if Line.startswith(CommentCharacter) or Line.strip() == '':
+ fw.write(Line)
+ else:
+ LineList = Line.split(KeySplitCharacter, 1)
+ if len(LineList) >= 2:
+ Key = LineList[0].strip()
+ if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
+ if Key not in existKeys:
+ existKeys.append(Key)
+ else:
+ print("Warning: Found duplicate key item in original configuration files!")
+
+ if Num == 0:
+ Line = "%-30s = \n" % Key
+ else:
+ ret = GetConfigureKeyValue(self, Key)
+ if ret is not None:
+ Line = ret
+ fw.write(Line)
+ for key in self.TargetTxtDictionary:
+ if key not in existKeys:
+ print("Warning: %s does not exist in original configuration file" % key)
+ Line = GetConfigureKeyValue(self, key)
+ if Line is None:
+ Line = "%-30s = " % key
+ fw.write(Line)
+
+ fr.close()
+ fw.close()
+ os.remove(self.FileName)
+            os.rename(os.path.normpath(os.path.join(self.WorkSpace, 'Conf', 'targetnew.txt')), self.FileName)
+
+ except:
+ last_type, last_value, last_tb = sys.exc_info()
+ traceback.print_exception(last_type, last_value, last_tb)
+
+def GetConfigureKeyValue(self, Key):
+ Line = None
+ if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE is not None:
+ dscFullPath = os.path.join(self.WorkSpace, self.Opt.DSCFILE)
+ if os.path.exists(dscFullPath):
+ Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
+ else:
+ EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
+ "DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
+ elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
+ tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
+ if os.path.exists(tooldefFullPath):
+ Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
+ else:
+ EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
+ "Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
+
+    elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
+        Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
+ elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
+ Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
+ elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH is not None:
+ Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET_ARCH))
+ elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG is not None:
+ Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_CHAIN_TAG)
+ elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE is not None:
+ buildruleFullPath = os.path.join(self.WorkSpace, self.Opt.BUILD_RULE_FILE)
+ if os.path.exists(buildruleFullPath):
+ Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
+ else:
+            EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
+ "Build rule file %s does not exist!" % self.Opt.BUILD_RULE_FILE, RaiseError=False)
+ return Line
+
+VersionNumber = ("0.01" + " " + gBUILD_VERSION)
+__version__ = "%prog Version " + VersionNumber
+__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
+__usage__ = "%prog [options] {args} \
+\nArgs: \
+\n  Clean  clean all default configuration values in target.txt. \
+\n  Print  print all current configuration values in target.txt. \
+\n  Set    replace the default configuration with the expected values specified by options."
+
+gParamCheck = []
+def SingleCheckCallback(option, opt_str, value, parser):
+ if option not in gParamCheck:
+ setattr(parser.values, option.dest, value)
+ gParamCheck.append(option)
+ else:
+ parser.error("Option %s only allows one instance in command line!" % option)
+
+def RangeCheckCallback(option, opt_str, value, parser):
+ if option not in gParamCheck:
+ gParamCheck.append(option)
+ if value < 1 or value > 8:
+ parser.error("The count of multi-thread is not in valid range of 1 ~ 8.")
+ else:
+ setattr(parser.values, option.dest, value)
+ else:
+ parser.error("Option %s only allows one instance in command line!" % option)
+
+def MyOptionParser():
+ parser = OptionParser(version=__version__, prog="TargetTool.exe", usage=__usage__, description=__copyright__)
+ parser.add_option("-a", "--arch", action="append", dest="TARGET_ARCH",
+ help="ARCHS is one of list: IA32, X64, ARM, AARCH64 or EBC, which replaces target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
+ parser.add_option("-p", "--platform", action="callback", type="string", dest="DSCFILE", callback=SingleCheckCallback,
+ help="Specify a DSC file, which replace target.txt's ACTIVE_PLATFORM definition. 0 will clear this setting in target.txt and can't combine with other value.")
+ parser.add_option("-c", "--tooldef", action="callback", type="string", dest="TOOL_DEFINITION_FILE", callback=SingleCheckCallback,
+ help="Specify the WORKSPACE relative path of tool_def.txt file, which replace target.txt's TOOL_CHAIN_CONF definition. 0 will clear this setting in target.txt and can't combine with other value.")
+ parser.add_option("-t", "--target", action="append", type="choice", choices=['DEBUG', 'RELEASE', '0'], dest="TARGET",
+ help="TARGET is one of list: DEBUG, RELEASE, which replaces target.txt's TARGET definition. To specify more TARGET, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
+ parser.add_option("-n", "--tagname", action="callback", type="string", dest="TOOL_CHAIN_TAG", callback=SingleCheckCallback,
+ help="Specify the Tool Chain Tagname, which replaces target.txt's TOOL_CHAIN_TAG definition. 0 will clear this setting in target.txt and can't combine with other value.")
+ parser.add_option("-r", "--buildrule", action="callback", type="string", dest="BUILD_RULE_FILE", callback=SingleCheckCallback,
+ help="Specify the build rule configure file, which replaces target.txt's BUILD_RULE_CONF definition. If not specified, the default value Conf/build_rule.txt will be set.")
+ parser.add_option("-m", "--multithreadnum", action="callback", type="int", dest="NUM", callback=RangeCheckCallback,
+ help="Specify the multi-thread number which replace target.txt's MAX_CONCURRENT_THREAD_NUMBER. If the value is less than 2, MULTIPLE_THREAD will be disabled. If the value is larger than 1, MULTIPLE_THREAD will be enabled.")
+ (opt, args)=parser.parse_args()
+ return (opt, args)
+
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ if os.getenv('WORKSPACE') is None:
+        print("ERROR: WORKSPACE should be specified, or the edksetup script should be executed, before running TargetTool")
+ sys.exit(1)
+
+ (opt, args) = MyOptionParser()
+ if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
+        print("Exactly one argument is required and it must be Print, Clean, or Set.")
+ sys.exit(1)
+ if opt.NUM is not None and opt.NUM < 1:
+ print("The MAX_CONCURRENT_THREAD_NUMBER must be larger than 0.")
+ sys.exit(1)
+ if opt.TARGET is not None and len(opt.TARGET) > 1:
+ for elem in opt.TARGET:
+ if elem == '0':
+ print("0 will clear the TARGET setting in target.txt and can't combine with other value.")
+ sys.exit(1)
+ if opt.TARGET_ARCH is not None and len(opt.TARGET_ARCH) > 1:
+ for elem in opt.TARGET_ARCH:
+ if elem == '0':
+ print("0 will clear the TARGET_ARCH setting in target.txt and can't combine with other value.")
+ sys.exit(1)
+
+ try:
+ FileHandle = TargetTool(opt, args)
+ if FileHandle.Arg.lower() == 'print':
+ FileHandle.Print()
+ sys.exit(0)
+ elif FileHandle.Arg.lower() == 'clean':
+ FileHandle.RWFile('#', '=', 0)
+ else:
+ FileHandle.RWFile('#', '=', 1)
+ except Exception as e:
+ last_type, last_value, last_tb = sys.exc_info()
+ traceback.print_exception(last_type, last_value, last_tb)
+
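ConvertTextFileToDict and RWFile both operate on Conf/target.txt, a '#'-commented key = value file. An illustrative file covering the keys in TargetTxtDictionary (values are examples, not shipped defaults), followed by an invocation that would rewrite the TARGET and TARGET_ARCH lines:

    ACTIVE_PLATFORM              = MdeModulePkg/MdeModulePkg.dsc
    TARGET                       = DEBUG
    TARGET_ARCH                  = X64
    TOOL_CHAIN_CONF              = Conf/tools_def.txt
    TOOL_CHAIN_TAG               = GCC5
    BUILD_RULE_CONF              = Conf/build_rule.txt
    MAX_CONCURRENT_THREAD_NUMBER = 8

    python TargetTool.py -t RELEASE -a IA32 set    # requires WORKSPACE, per the __main__ guard above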
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/__init__.py
new file mode 100644
index 00000000..662399e4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/TargetTool/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'TargetTool' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Trim/Trim.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Trim/Trim.py
new file mode 100755
index 00000000..548f53a2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Trim/Trim.py
@@ -0,0 +1,627 @@
+## @file
+# Trim files preprocessed by compiler
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+import Common.LongFilePathOs as os
+import sys
+import re
+from io import BytesIO
+import codecs
+from optparse import OptionParser
+from optparse import make_option
+from Common.BuildToolError import *
+from Common.Misc import *
+from Common.DataType import *
+from Common.BuildVersion import gBUILD_VERSION
+import Common.EdkLogger as EdkLogger
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+# Version and Copyright
+__version_number__ = ("0.10" + " " + gBUILD_VERSION)
+__version__ = "%prog Version " + __version_number__
+__copyright__ = "Copyright (c) 2007-2018, Intel Corporation. All rights reserved."
+
+## Regular expression for matching Line Control directive like "#line xxx"
+gLineControlDirective = re.compile(r'^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"')
+## Regular expression for matching "typedef struct"
+gTypedefPattern = re.compile(r"^\s*typedef\s+struct(\s+\w+)?\s*[{]*$", re.MULTILINE)
+## Regular expression for matching "#pragma pack"
+gPragmaPattern = re.compile(r"^\s*#pragma\s+pack", re.MULTILINE)
+## Regular expression for matching "typedef"
+gTypedef_SinglePattern = re.compile(r"^\s*typedef", re.MULTILINE)
+## Regular expression for matching "typedef struct, typedef union, struct, union"
+gTypedef_MulPattern = re.compile(r"^\s*(typedef)?\s+(struct|union)(\s+\w+)?\s*[{]*$", re.MULTILINE)
+
+#
+# The following number patterns only match when there is a leading
+# non-(alphanumeric or '_') character and no trailing alphanumeric or '_'.
+# The patterns match greedily, so it is safe for gDecNumberPattern or
+# gHexNumberPattern to grab the maximum match.
+#
+## Regular expression for matching HEX number
+gHexNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])(0[xX])([0-9a-fA-F]+)(U(?=$|[^a-zA-Z0-9_]))?")
+## Regular expression for matching decimal number with 'U' postfix
+gDecNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])([0-9]+)U(?=$|[^a-zA-Z0-9_])")
+## Regular expression for matching constant with 'ULL' 'LL' postfix
+gLongNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])(0[xX][0-9a-fA-F]+|[0-9]+)U?LL(?=$|[^a-zA-Z0-9_])")
+
+## Regular expression for matching "Include ()" in asl file
+gAslIncludePattern = re.compile(r"^(\s*)[iI]nclude\s*\(\"?([^\"\(\)]+)\"\)", re.MULTILINE)
+## Regular expression for matching C style #include "XXX.asl" in asl file
+gAslCIncludePattern = re.compile(r'^(\s*)#include\s*[<"]\s*([-\\/\w.]+)\s*([>"])', re.MULTILINE)
+## Patterns used to convert EDK conventions to EDK2 ECP conventions
+
+## Regular expression for finding header file inclusions
+gIncludePattern = re.compile(r"^[ \t]*[%]?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
+
+
+## file cache to avoid circular include in ASL file
+gIncludedAslFile = []
+
+## Trim preprocessed source code
+#
+# Remove extra content made by preprocessor. The preprocessor must enable the
+# line number generation option when preprocessing.
+#
+# @param Source      File to be trimmed
+# @param Target      File to store the trimmed content
+# @param ConvertHex  If True, convert standard HEX format to MASM format
+# @param TrimLong    If True, strip the 'U'/'LL' postfix from numeric constants
+#
+def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
+ CreateDirectory(os.path.dirname(Target))
+ try:
+ with open(Source, "r") as File:
+ Lines = File.readlines()
+ except IOError:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
+ except:
+ EdkLogger.error("Trim", AUTOGEN_ERROR, "TrimPreprocessedFile: Error while processing file", File=Source)
+
+ PreprocessedFile = ""
+ InjectedFile = ""
+ LineIndexOfOriginalFile = None
+ NewLines = []
+ LineControlDirectiveFound = False
+ for Index in range(len(Lines)):
+ Line = Lines[Index]
+ #
+ # Find out the name of files injected by preprocessor from the lines
+ # with Line Control directive
+ #
+ MatchList = gLineControlDirective.findall(Line)
+ if MatchList != []:
+ MatchList = MatchList[0]
+ if len(MatchList) == 2:
+ LineNumber = int(MatchList[0], 0)
+ InjectedFile = MatchList[1]
+ InjectedFile = os.path.normpath(InjectedFile)
+ InjectedFile = os.path.normcase(InjectedFile)
+ # The first injected file must be the preprocessed file itself
+ if PreprocessedFile == "":
+ PreprocessedFile = InjectedFile
+ LineControlDirectiveFound = True
+ continue
+ elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
+ continue
+
+ if LineIndexOfOriginalFile is None:
+ #
+ # Any non-empty lines must be from original preprocessed file.
+ # And this must be the first one.
+ #
+ LineIndexOfOriginalFile = Index
+ EdkLogger.verbose("Found original file content starting from line %d"
+ % (LineIndexOfOriginalFile + 1))
+
+ if TrimLong:
+ Line = gLongNumberPattern.sub(r"\1", Line)
+ # convert HEX number format if indicated
+ if ConvertHex:
+ Line = gHexNumberPattern.sub(r"0\2h", Line)
+ else:
+ Line = gHexNumberPattern.sub(r"\1\2", Line)
+
+ # convert Decimal number format
+ Line = gDecNumberPattern.sub(r"\1", Line)
+
+ if LineNumber is not None:
+ EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
+ # in case preprocessor removed some lines, like blank or comment lines
+ if LineNumber <= len(NewLines):
+                # a line directive may point back at a line already emitted
+ NewLines[LineNumber - 1] = Line
+ else:
+ if LineNumber > (len(NewLines) + 1):
+ for LineIndex in range(len(NewLines), LineNumber-1):
+ NewLines.append(TAB_LINE_BREAK)
+ NewLines.append(Line)
+ LineNumber = None
+ EdkLogger.verbose("Now we have lines: %d" % len(NewLines))
+ else:
+ NewLines.append(Line)
+
+ # in case there's no line directive or linemarker found
+ if (not LineControlDirectiveFound) and NewLines == []:
+ MulPatternFlag = False
+ SinglePatternFlag = False
+ Brace = 0
+ for Index in range(len(Lines)):
+ Line = Lines[Index]
+ if MulPatternFlag == False and gTypedef_MulPattern.search(Line) is None:
+ if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) is None:
+                    # remove "#pragma pack" directive
+ if gPragmaPattern.search(Line) is None:
+ NewLines.append(Line)
+ continue
+ elif SinglePatternFlag == False:
+ SinglePatternFlag = True
+ if Line.find(";") >= 0:
+ SinglePatternFlag = False
+ elif MulPatternFlag == False:
+ # found "typedef struct, typedef union, union, struct", keep its position and set a flag
+ MulPatternFlag = True
+
+ # match { and } to find the end of typedef definition
+ if Line.find("{") >= 0:
+ Brace += 1
+ elif Line.find("}") >= 0:
+ Brace -= 1
+
+ # "typedef struct, typedef union, union, struct" must end with a ";"
+ if Brace == 0 and Line.find(";") >= 0:
+ MulPatternFlag = False
+
+ # save to file
+ try:
+ with open(Target, 'w') as File:
+ File.writelines(NewLines)
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
+
+## Trim preprocessed VFR file
+#
+# Remove extra content made by preprocessor. The preprocessor doesn't need to
+# enable line number generation option when preprocessing.
+#
+# @param Source File to be trimmed
+# @param Target File to store the trimmed content
+#
+def TrimPreprocessedVfr(Source, Target):
+ CreateDirectory(os.path.dirname(Target))
+
+ try:
+ with open(Source, "r") as File:
+ Lines = File.readlines()
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
+ # read whole file
+
+ FoundTypedef = False
+ Brace = 0
+ TypedefStart = 0
+ TypedefEnd = 0
+ for Index in range(len(Lines)):
+ Line = Lines[Index]
+ # don't trim the lines from "formset" definition to the end of file
+ if Line.strip() == 'formset':
+ break
+
+ if FoundTypedef == False and (Line.find('#line') == 0 or Line.find('# ') == 0):
+            # blank out the line number directive if it's not within a "typedef struct"
+ Lines[Index] = "\n"
+ continue
+
+ if FoundTypedef == False and gTypedefPattern.search(Line) is None:
+            # keep "#pragma pack" directive
+ if gPragmaPattern.search(Line) is None:
+ Lines[Index] = "\n"
+ continue
+ elif FoundTypedef == False:
+            # found "typedef struct"; keep its position and set a flag
+ FoundTypedef = True
+ TypedefStart = Index
+
+ # match { and } to find the end of typedef definition
+ if Line.find("{") >= 0:
+ Brace += 1
+ elif Line.find("}") >= 0:
+ Brace -= 1
+
+ # "typedef struct" must end with a ";"
+ if Brace == 0 and Line.find(";") >= 0:
+ FoundTypedef = False
+ TypedefEnd = Index
+            # keep all "typedef struct" definitions except GUID, EFI_PLABEL and PAL_CALL_RETURN
+ if Line.strip("} ;\r\n") in [TAB_GUID, "EFI_PLABEL", "PAL_CALL_RETURN"]:
+ for i in range(TypedefStart, TypedefEnd+1):
+ Lines[i] = "\n"
+
+ # save all lines trimmed
+ try:
+ with open(Target, 'w') as File:
+ File.writelines(Lines)
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
+
+## Read the content of an ASL/ASM file, expanding its include statements recursively
+#
+# @param Source            File to be read
+# @param Indent            Spaces before the Include() statement
+# @param IncludePathList   The list of external include directories
+# @param LocalSearchPath   If LocalSearchPath is specified, this path will be searched
+#                          first for the included file; otherwise, only the paths
+#                          in the IncludePathList will be searched.
+# @param IncludeFileList   Accumulates the absolute paths of all included files
+# @param filetype          "ASL" or "ASM"; selects which include pattern applies
+#
+def DoInclude(Source, Indent='', IncludePathList=None, LocalSearchPath=None, IncludeFileList=None, filetype=None):
+    NewFileContent = []
+    # avoid mutable default arguments, which are shared across calls
+    if IncludePathList is None:
+        IncludePathList = []
+    if IncludeFileList is None:
+        IncludeFileList = []
+ try:
+ #
+ # Search LocalSearchPath first if it is specified.
+ #
+ if LocalSearchPath:
+ SearchPathList = [LocalSearchPath] + IncludePathList
+ else:
+ SearchPathList = IncludePathList
+
+ for IncludePath in SearchPathList:
+ IncludeFile = os.path.join(IncludePath, Source)
+ if os.path.isfile(IncludeFile):
+ try:
+ with open(IncludeFile, "r") as File:
+ F = File.readlines()
+ except:
+ with codecs.open(IncludeFile, "r", encoding='utf-8') as File:
+ F = File.readlines()
+ break
+ else:
+ EdkLogger.warn("Trim", "Failed to find include file %s" % Source)
+ return []
+ except:
+ EdkLogger.warn("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
+ return []
+
+
+ # avoid A "include" B and B "include" A
+ IncludeFile = os.path.abspath(os.path.normpath(IncludeFile))
+ if IncludeFile in gIncludedAslFile:
+ EdkLogger.warn("Trim", "Circular include",
+ ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile))
+ return []
+ gIncludedAslFile.append(IncludeFile)
+ IncludeFileList.append(IncludeFile.strip())
+ for Line in F:
+ LocalSearchPath = None
+ if filetype == "ASL":
+ Result = gAslIncludePattern.findall(Line)
+ if len(Result) == 0:
+ Result = gAslCIncludePattern.findall(Line)
+ if len(Result) == 0 or os.path.splitext(Result[0][1])[1].lower() not in [".asl", ".asi"]:
+ NewFileContent.append("%s%s" % (Indent, Line))
+ continue
+ #
+                    # Search the local directory first if the current file uses the #include "XXX" pattern
+ #
+ if Result[0][2] == '"':
+ LocalSearchPath = os.path.dirname(IncludeFile)
+ CurrentIndent = Indent + Result[0][0]
+ IncludedFile = Result[0][1]
+ NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent, IncludePathList, LocalSearchPath,IncludeFileList,filetype))
+ NewFileContent.append("\n")
+ elif filetype == "ASM":
+ Result = gIncludePattern.findall(Line)
+ if len(Result) == 0:
+ NewFileContent.append("%s%s" % (Indent, Line))
+ continue
+
+ IncludedFile = Result[0]
+
+ IncludedFile = IncludedFile.strip()
+ IncludedFile = os.path.normpath(IncludedFile)
+ NewFileContent.extend(DoInclude(IncludedFile, '', IncludePathList, LocalSearchPath,IncludeFileList,filetype))
+ NewFileContent.append("\n")
+
+ gIncludedAslFile.pop()
+
+ return NewFileContent
+
+
+## Trim ASL file
+#
+# Replace ASL include statements with the content of the included files
+#
+# @param Source File to be trimmed
+# @param Target File to store the trimmed content
+# @param IncludePathFile The file to log the external include path
+#
+def TrimAslFile(Source, Target, IncludePathFile,AslDeps = False):
+ CreateDirectory(os.path.dirname(Target))
+
+ SourceDir = os.path.dirname(Source)
+ if SourceDir == '':
+ SourceDir = '.'
+
+ #
+ # Add source directory as the first search directory
+ #
+ IncludePathList = [SourceDir]
+
+ #
+ # If additional include path file is specified, append them all
+ # to the search directory list.
+ #
+ if IncludePathFile:
+ try:
+ LineNum = 0
+ with open(IncludePathFile, 'r') as File:
+ FileLines = File.readlines()
+ for Line in FileLines:
+ LineNum += 1
+ if Line.startswith("/I") or Line.startswith ("-I"):
+ IncludePathList.append(Line[2:].strip())
+ else:
+ EdkLogger.warn("Trim", "Invalid include line in include list file.", IncludePathFile, LineNum)
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=IncludePathFile)
+ AslIncludes = []
+ Lines = DoInclude(Source, '', IncludePathList,IncludeFileList=AslIncludes,filetype='ASL')
+ AslIncludes = [item for item in AslIncludes if item !=Source]
+ SaveFileOnChange(os.path.join(os.path.dirname(Target),os.path.basename(Source))+".trim.deps", " \\\n".join([Source+":"] +AslIncludes),False)
+
+ #
+ # Undef MIN and MAX to avoid collision in ASL source code
+ #
+ Lines.insert(0, "#undef MIN\n#undef MAX\n")
+
+ # save all lines trimmed
+ try:
+ with open(Target, 'w') as File:
+ File.writelines(Lines)
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
+
+## Trim ASM file
+#
+# Replace ASM include statements with the content of the included files
+#
+# @param Source File to be trimmed
+# @param Target File to store the trimmed content
+# @param IncludePathFile The file to log the external include path
+#
+def TrimAsmFile(Source, Target, IncludePathFile):
+ CreateDirectory(os.path.dirname(Target))
+
+ SourceDir = os.path.dirname(Source)
+ if SourceDir == '':
+ SourceDir = '.'
+
+ #
+ # Add source directory as the first search directory
+ #
+ IncludePathList = [SourceDir]
+ #
+ # If additional include path file is specified, append them all
+ # to the search directory list.
+ #
+ if IncludePathFile:
+ try:
+ LineNum = 0
+ with open(IncludePathFile, 'r') as File:
+ FileLines = File.readlines()
+ for Line in FileLines:
+ LineNum += 1
+ if Line.startswith("/I") or Line.startswith ("-I"):
+ IncludePathList.append(Line[2:].strip())
+ else:
+ EdkLogger.warn("Trim", "Invalid include line in include list file.", IncludePathFile, LineNum)
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=IncludePathFile)
+ AsmIncludes = []
+ Lines = DoInclude(Source, '', IncludePathList,IncludeFileList=AsmIncludes,filetype='ASM')
+ AsmIncludes = [item for item in AsmIncludes if item != Source]
+ if AsmIncludes:
+ SaveFileOnChange(os.path.join(os.path.dirname(Target),os.path.basename(Source))+".trim.deps", " \\\n".join([Source+":"] +AsmIncludes),False)
+ # save all lines trimmed
+ try:
+ with open(Target, 'w') as File:
+ File.writelines(Lines)
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
+
+def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
+ VfrNameList = []
+ if os.path.isdir(DebugDir):
+ for CurrentDir, Dirs, Files in os.walk(DebugDir):
+ for FileName in Files:
+ Name, Ext = os.path.splitext(FileName)
+ if Ext == '.c' and Name != 'AutoGen':
+ VfrNameList.append (Name + 'Bin')
+
+ VfrNameList.append (ModuleName + 'Strings')
+
+ EfiFileName = os.path.join(DebugDir, ModuleName + '.efi')
+ MapFileName = os.path.join(DebugDir, ModuleName + '.map')
+ VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrNameList)
+
+ if not VfrUniOffsetList:
+ return
+
+ try:
+ fInputfile = open(OutputFile, "wb+")
+ except:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)
+
+    # Use an instance of BytesIO to cache data
+ fStringIO = BytesIO()
+
+ for Item in VfrUniOffsetList:
+ if (Item[0].find("Strings") != -1):
+ #
+ # UNI offset in image.
+ # GUID + Offset
+ # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
+ #
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
+ fStringIO.write(UniGuid)
+ UniValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (UniValue)
+ else:
+ #
+ # VFR binary offset in image.
+ # GUID + Offset
+ # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
+ #
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
+ fStringIO.write(VfrGuid)
+ VfrValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (VfrValue)
+
+ #
+ # write data into file.
+ #
+ try :
+ fInputfile.write (fStringIO.getvalue())
+ except:
+        EdkLogger.error("Trim", FILE_WRITE_FAILURE, "Write data to file %s failed; please check whether the file is locked or in use by another application." % OutputFile, None)
+
+ fStringIO.close ()
+ fInputfile.close ()
+
+
+## Parse command line options
+#
+# Using standard Python module optparse to parse command line option of this tool.
+#
+# @retval Options    An optparse.Values object containing the parsed options
+# @retval InputFile Path of file to be trimmed
+#
+def Options():
+ OptionList = [
+ make_option("-s", "--source-code", dest="FileType", const="SourceCode", action="store_const",
+ help="The input file is preprocessed source code, including C or assembly code"),
+ make_option("-r", "--vfr-file", dest="FileType", const="Vfr", action="store_const",
+ help="The input file is preprocessed VFR file"),
+ make_option("--Vfr-Uni-Offset", dest="FileType", const="VfrOffsetBin", action="store_const",
+ help="The input file is EFI image"),
+ make_option("--asl-deps", dest="AslDeps", const="True", action="store_const",
+ help="Generate Asl dependent files."),
+ make_option("-a", "--asl-file", dest="FileType", const="Asl", action="store_const",
+ help="The input file is ASL file"),
+ make_option( "--asm-file", dest="FileType", const="Asm", action="store_const",
+ help="The input file is asm file"),
+ make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true",
+ help="Convert standard hex format (0xabcd) to MASM format (abcdh)"),
+
+ make_option("-l", "--trim-long", dest="TrimLong", action="store_true",
+ help="Remove postfix of long number"),
+ make_option("-i", "--include-path-file", dest="IncludePathFile",
+ help="The input file is include path list to search for ASL include file"),
+ make_option("-o", "--output", dest="OutputFile",
+ help="File to store the trimmed content"),
+ make_option("--ModuleName", dest="ModuleName", help="The module's BASE_NAME"),
+ make_option("--DebugDir", dest="DebugDir",
+ help="Debug Output directory to store the output files"),
+ make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
+ help="Run verbosely"),
+ make_option("-d", "--debug", dest="LogLevel", type="int",
+ help="Run with debug information"),
+ make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
+ help="Run quietly"),
+ make_option("-?", action="help", help="show this help message and exit"),
+ ]
+
+ # use clearer usage to override default usage message
+ UsageString = "%prog [-s|-r|-a|--Vfr-Uni-Offset] [-c] [-v|-d <debug_level>|-q] [-i <include_path_file>] [-o <output_file>] [--ModuleName <ModuleName>] [--DebugDir <DebugDir>] [<input_file>]"
+
+ Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
+ Parser.set_defaults(FileType="Vfr")
+ Parser.set_defaults(ConvertHex=False)
+ Parser.set_defaults(LogLevel=EdkLogger.INFO)
+
+ Options, Args = Parser.parse_args()
+
+ # error check
+ if Options.FileType == 'VfrOffsetBin':
+ if len(Args) == 0:
+ return Options, ''
+ elif len(Args) > 1:
+ EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
+ if len(Args) == 0:
+ EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage())
+ if len(Args) > 1:
+ EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
+
+ InputFile = Args[0]
+ return Options, InputFile
+
+## Entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ try:
+ EdkLogger.Initialize()
+ CommandOptions, InputFile = Options()
+ if CommandOptions.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.SetLevel(CommandOptions.LogLevel + 1)
+ else:
+ EdkLogger.SetLevel(CommandOptions.LogLevel)
+ except FatalError as X:
+ return 1
+
+ try:
+ if CommandOptions.FileType == "Vfr":
+ if CommandOptions.OutputFile is None:
+ CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
+ TrimPreprocessedVfr(InputFile, CommandOptions.OutputFile)
+ elif CommandOptions.FileType == "Asl":
+ if CommandOptions.OutputFile is None:
+ CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
+ TrimAslFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile,CommandOptions.AslDeps)
+ elif CommandOptions.FileType == "VfrOffsetBin":
+ GenerateVfrBinSec(CommandOptions.ModuleName, CommandOptions.DebugDir, CommandOptions.OutputFile)
+ elif CommandOptions.FileType == "Asm":
+ TrimAsmFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile)
+ else :
+ if CommandOptions.OutputFile is None:
+ CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
+ TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex, CommandOptions.TrimLong)
+ except FatalError as X:
+ import platform
+ import traceback
+ if CommandOptions is not None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ return 1
+ except:
+ import traceback
+ import platform
+ EdkLogger.error(
+ "\nTrim",
+ CODE_ERROR,
+ "Unknown fatal error when trimming [%s]" % InputFile,
+ ExtraData="\n(Please send email to %s for help, attaching following call stack trace!)\n" % MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ return 1
+
+ return 0
+
+if __name__ == '__main__':
+ r = Main()
+ ## 0-127 is a safe return range, and 1 is a standard default error
+ if r < 0 or r > 127: r = 1
+ sys.exit(r)
+
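Mapping the option table back to the dispatch in Main(), some representative invocations (file names illustrative):

    python Trim.py -s -c -l -o Module.iii Module.i                # preprocessed source; MASM hex, strip U/LL postfixes
    python Trim.py -r -o Module.iii Module.i                      # preprocessed VFR (the default FileType)
    python Trim.py -a -i IncludePathList.txt -o Out.iii In.asl    # ASL with an external include path list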
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/BuildVersion.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/BuildVersion.py
new file mode 100644
index 00000000..e62b8ef3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/BuildVersion.py
@@ -0,0 +1,14 @@
+## @file
+#
+# This file is for build version number auto generation
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Build version information
+'''
+
+gBUILD_VERSION = "Developer Build based on Revision: Unknown"
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DependencyRules.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DependencyRules.py
new file mode 100755
index 00000000..2742db84
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DependencyRules.py
@@ -0,0 +1,448 @@
+## @file
+# This file is for installed package information database operations
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Dependency
+'''
+
+##
+# Import Modules
+#
+from os.path import dirname
+import os
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Library.Parsing import GetWorkspacePackage
+from Library.Parsing import GetWorkspaceModule
+from Library.Parsing import GetPkgInfoFromDec
+from Library.Misc import GetRelativePath
+from Library import GlobalData
+from Logger.ToolError import FatalError
+from Logger.ToolError import EDK1_INF_ERROR
+from Logger.ToolError import UNKNOWN_ERROR
+(DEPEX_CHECK_SUCCESS, DEPEX_CHECK_MODULE_NOT_FOUND, \
+DEPEX_CHECK_PACKAGE_NOT_FOUND, DEPEX_CHECK_DP_NOT_FOUND) = (0, 1, 2, 3)
+
+
+## DependencyRules
+#
+# This class represents the dependency rule check mechanism
+#
+# @param object: Inherited from object class
+#
+class DependencyRules(object):
+ def __init__(self, Datab, ToBeInstalledPkgList=None):
+ self.IpiDb = Datab
+ self.WsPkgList = GetWorkspacePackage()
+ self.WsModuleList = GetWorkspaceModule()
+
+ self.PkgsToBeDepend = [(PkgInfo[1], PkgInfo[2]) for PkgInfo in self.WsPkgList]
+
+ # Add package info from the DIST to be installed.
+ self.PkgsToBeDepend.extend(self.GenToBeInstalledPkgList(ToBeInstalledPkgList))
+
+ def GenToBeInstalledPkgList(self, ToBeInstalledPkgList):
+ if not ToBeInstalledPkgList:
+ return []
+ RtnList = []
+ for Dist in ToBeInstalledPkgList:
+ for Package in Dist.PackageSurfaceArea:
+ RtnList.append((Package[0], Package[1]))
+
+ return RtnList
+
+ ## Check whether a module exists by checking the Guid+Version+Name+Path combination
+ #
+ # @param Guid: Guid of a module
+ # @param Version: Version of a module
+ # @param Name: Name of a module
+ # @param Path: Path of a module
+    # @return: True if the module exists, otherwise False
+ #
+ def CheckModuleExists(self, Guid, Version, Name, Path):
+ Logger.Verbose(ST.MSG_CHECK_MODULE_EXIST)
+ ModuleList = self.IpiDb.GetModInPackage(Guid, Version, Name, Path)
+ ModuleList.extend(self.IpiDb.GetStandaloneModule(Guid, Version, Name, Path))
+ Logger.Verbose(ST.MSG_CHECK_MODULE_EXIST_FINISH)
+        return len(ModuleList) > 0
+
+    ## Check whether a module's depex is satisfied.
+    #
+    # @param ModuleObj: A module object
+    # @param DpObj: A distribution object
+    # @return: True if the module's depex is satisfied, otherwise False
+ #
+ def CheckModuleDepexSatisfied(self, ModuleObj, DpObj=None):
+ Logger.Verbose(ST.MSG_CHECK_MODULE_DEPEX_START)
+ Result = True
+ Dep = None
+ if ModuleObj.GetPackageDependencyList():
+ Dep = ModuleObj.GetPackageDependencyList()[0]
+ for Dep in ModuleObj.GetPackageDependencyList():
+ #
+ # first check whether the dependency satisfied by current workspace
+ #
+ Exist = self.CheckPackageExists(Dep.GetGuid(), Dep.GetVersion())
+ #
+ # check whether satisfied by current distribution
+ #
+ if not Exist:
+ if DpObj is None:
+ Result = False
+ break
+ for GuidVerPair in DpObj.PackageSurfaceArea.keys():
+ if Dep.GetGuid() == GuidVerPair[0]:
+ if Dep.GetVersion() is None or \
+ len(Dep.GetVersion()) == 0:
+ Result = True
+ break
+ if Dep.GetVersion() == GuidVerPair[1]:
+ Result = True
+ break
+ else:
+ Result = False
+ break
+
+ if not Result:
+ Logger.Error("CheckModuleDepex", UNKNOWN_ERROR, \
+ ST.ERR_DEPENDENCY_NOT_MATCH % (ModuleObj.GetName(), \
+ Dep.GetPackageFilePath(), \
+ Dep.GetGuid(), \
+ Dep.GetVersion()))
+ return Result
+
+ ## Check whether a package exists in the package list specified by PkgsToBeDepend.
+ #
+ # @param Guid: Guid of a package
+ # @param Version: Version of a package
+ # @return: True if the package exists, otherwise False
+ #
+ def CheckPackageExists(self, Guid, Version):
+ Logger.Verbose(ST.MSG_CHECK_PACKAGE_START)
+ Found = False
+ for (PkgGuid, PkgVer) in self.PkgsToBeDepend:
+ if (PkgGuid == Guid):
+ #
+ # if version is not empty and not equal, then not match
+ #
+ if Version and (PkgVer != Version):
+ Found = False
+ break
+ else:
+ Found = True
+ break
+ else:
+ Found = False
+
+ Logger.Verbose(ST.MSG_CHECK_PACKAGE_FINISH)
+ return Found
+
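+ # Illustrative matching semantics (hypothetical values): with
+ # PkgsToBeDepend = [('GUID-A', '1.0')],
+ # CheckPackageExists('GUID-A', None) -> True (an empty version matches any)
+ # CheckPackageExists('GUID-A', '1.0') -> True (exact version match)
+ # CheckPackageExists('GUID-A', '2.0') -> False (the search stops at the
+ # first guid match, so a wrong version is final)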
+ ## Check whether a package's depex is satisfied.
+ #
+ # @param PkgObj: A package object
+ # @param DpObj: A distribution object
+ # @return: True if the package's depex is satisfied, otherwise False
+ #
+ def CheckPackageDepexSatisfied(self, PkgObj, DpObj=None):
+ ModuleDict = PkgObj.GetModuleDict()
+ for ModKey in ModuleDict.keys():
+ ModObj = ModuleDict[ModKey]
+ if not self.CheckModuleDepexSatisfied(ModObj, DpObj):
+ return False
+ return True
+
+ ## Check whether a DP exists.
+ #
+ # @param Guid: Guid of a Distribution
+ # @param Version: Version of a Distribution
+ # @return: True if the distribution exists, otherwise False
+ def CheckDpExists(self, Guid, Version):
+ Logger.Verbose(ST.MSG_CHECK_DP_START)
+ DpList = self.IpiDb.GetDp(Guid, Version)
+ Found = len(DpList) > 0
+
+ Logger.Verbose(ST.MSG_CHECK_DP_FINISH)
+ return Found
+
+ ## Check whether a DP's depex is satisfied by the current workspace for Install
+ #
+ # @param DpObj: A distribution object
+ # @return: True if the distribution's depex is satisfied, otherwise False
+ #
+ def CheckInstallDpDepexSatisfied(self, DpObj):
+ return self.CheckDpDepexSatisfied(DpObj)
+
+ ## Check whether multiple DPs' depex are satisfied by the current workspace for Install
+ #
+ # @param DpObjList: A distribution object list
+ # @return: True if every distribution's depex is satisfied, otherwise False
+ #
+ def CheckTestInstallPdDepexSatisfied(self, DpObjList):
+ for DpObj in DpObjList:
+ if self.CheckDpDepexSatisfied(DpObj):
+ for PkgKey in DpObj.PackageSurfaceArea.keys():
+ PkgObj = DpObj.PackageSurfaceArea[PkgKey]
+ self.PkgsToBeDepend.append((PkgObj.Guid, PkgObj.Version))
+ else:
+ return False, DpObj
+
+ return True, DpObj
+
+
+ ## Check whether a DP's depex is satisfied by the current workspace
+ # (excluding the original distribution's packages to be replaced) for Replace
+ #
+ # @param DpObj: A distribution object
+ # @param OrigDpGuid: The original distribution's Guid
+ # @param OrigDpVersion: The original distribution's Version
+ #
+ def ReplaceCheckNewDpDepex(self, DpObj, OrigDpGuid, OrigDpVersion):
+ self.PkgsToBeDepend = [(PkgInfo[1], PkgInfo[2]) for PkgInfo in self.WsPkgList]
+ OrigDpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
+ for OrigPkgInfo in OrigDpPackageList:
+ Guid, Version = OrigPkgInfo[0], OrigPkgInfo[1]
+ if (Guid, Version) in self.PkgsToBeDepend:
+ self.PkgsToBeDepend.remove((Guid, Version))
+ return self.CheckDpDepexSatisfied(DpObj)
+
+ ## Check whether a DP's depex is satisfied by the current workspace.
+ #
+ # @param DpObj: A distribution object
+ #
+ def CheckDpDepexSatisfied(self, DpObj):
+ for PkgKey in DpObj.PackageSurfaceArea.keys():
+ PkgObj = DpObj.PackageSurfaceArea[PkgKey]
+ if not self.CheckPackageDepexSatisfied(PkgObj, DpObj):
+ return False
+
+ for ModKey in DpObj.ModuleSurfaceArea.keys():
+ ModObj = DpObj.ModuleSurfaceArea[ModKey]
+ if not self.CheckModuleDepexSatisfied(ModObj, DpObj):
+ return False
+
+ return True
+
+ ## Check whether a DP could be removed from current workspace.
+ #
+ # @param DpGuid: File's guid
+ # @param DpVersion: File's version
+ # @retval Removable: True if the distribution can be removed, otherwise False
+ # @retval DependModuleList: the list of modules that prevent the distribution from being removed
+ #
+ def CheckDpDepexForRemove(self, DpGuid, DpVersion):
+ Removable = True
+ DependModuleList = []
+ WsModuleList = self.WsModuleList
+ #
+ # remove modules that are included in the current DP
+ # (DpModuleList is a list of FilePath items)
+ DpModuleList = self.IpiDb.GetDpModuleList(DpGuid, DpVersion)
+ for Module in DpModuleList:
+ if Module in WsModuleList:
+ WsModuleList.remove(Module)
+ else:
+ Logger.Warn("UPT\n",
+ ST.ERR_MODULE_NOT_INSTALLED % Module)
+ #
+ # get the packages in the current DP and find their install paths
+ # (DpPackageList is a list of (PkgGuid, PkgVersion, InstallPath) items)
+ DpPackageList = self.IpiDb.GetPackageListFromDp(DpGuid, DpVersion)
+ DpPackagePathList = []
+ WorkSP = GlobalData.gWORKSPACE
+ for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
+ if PkgName:
+ pass
+ DecPath = dirname(DecFile)
+ if DecPath.find(WorkSP) > -1:
+ InstallPath = GetRelativePath(DecPath, WorkSP)
+ DecFileRelaPath = GetRelativePath(DecFile, WorkSP)
+ else:
+ InstallPath = DecPath
+ DecFileRelaPath = DecFile
+
+ if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
+ DpPackagePathList.append(DecFileRelaPath)
+ DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
+
+ #
+ # the items left in DpPackageList are packages that were installed but can no longer be found
+ #
+ for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
+ Logger.Warn("UPT",
+ ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
+
+ #
+ # check the remaining modules for dependencies on packages of the current DP
+ #
+ for Module in WsModuleList:
+ if (not VerifyRemoveModuleDep(Module, DpPackagePathList)):
+ Removable = False
+ DependModuleList.append(Module)
+ return (Removable, DependModuleList)
+
+
+ ## Check whether a DP could be replaced by a distribution containing NewDpPkgList
+ # from current workspace.
+ #
+ # @param OrigDpGuid: original Dp's Guid
+ # @param OrigDpVersion: original Dp's version
+ # @param NewDpPkgList: a list of package information (Guid, Version) in new Dp
+ # @retval Replaceable: True if the distribution can be replaced, otherwise False
+ # @retval DependModuleList: the list of modules that prevent the distribution from being replaced
+ #
+ def CheckDpDepexForReplace(self, OrigDpGuid, OrigDpVersion, NewDpPkgList):
+ Replaceable = True
+ DependModuleList = []
+ WsModuleList = self.WsModuleList
+ #
+ # remove modules that are included in the current DP
+ # (DpModuleList is a list of FilePath items)
+ DpModuleList = self.IpiDb.GetDpModuleList(OrigDpGuid, OrigDpVersion)
+ for Module in DpModuleList:
+ if Module in WsModuleList:
+ WsModuleList.remove(Module)
+ else:
+ Logger.Warn("UPT\n",
+ ST.ERR_MODULE_NOT_INSTALLED % Module)
+
+ OtherPkgList = NewDpPkgList
+ #
+ # get the packages in the current DP and find their install paths
+ # (DpPackageList is a list of (PkgGuid, PkgVersion, InstallPath) items)
+ DpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
+ DpPackagePathList = []
+ WorkSP = GlobalData.gWORKSPACE
+ for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
+ if PkgName:
+ pass
+ DecPath = dirname(DecFile)
+ if DecPath.find(WorkSP) > -1:
+ InstallPath = GetRelativePath(DecPath, WorkSP)
+ DecFileRelaPath = GetRelativePath(DecFile, WorkSP)
+ else:
+ InstallPath = DecPath
+ DecFileRelaPath = DecFile
+
+ if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
+ DpPackagePathList.append(DecFileRelaPath)
+ DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
+ else:
+ OtherPkgList.append((PkgGuid, PkgVersion))
+
+ #
+ # the items left in DpPackageList are packages that were installed but can no longer be found
+ #
+ for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
+ Logger.Warn("UPT",
+ ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
+
+ #
+ # check whether the remaining modules can be satisfied by packages that do not belong to the replaced DP
+ #
+ for Module in WsModuleList:
+ if (not VerifyReplaceModuleDep(Module, DpPackagePathList, OtherPkgList)):
+ Replaceable = False
+ DependModuleList.append(Module)
+ return (Replaceable, DependModuleList)
+
+
+## check whether a module depends on packages in DpPackagePathList; returns False
+# if such a dependency is found, True otherwise
+#
+# @param Path: a module path
+# @param DpPackagePathList: a list of Package Paths
+# @retval: False: module depends on package in DpPackagePathList
+# True: module doesn't depend on package in DpPackagePathList
+#
+def VerifyRemoveModuleDep(Path, DpPackagePathList):
+ try:
+ for Item in GetPackagePath(Path):
+ if Item in DpPackagePathList:
+ DecPath = os.path.normpath(os.path.join(GlobalData.gWORKSPACE, Item))
+ Logger.Info(ST.MSG_MODULE_DEPEND_ON % (Path, DecPath))
+ return False
+ return True
+ except FatalError as ErrCode:
+ if ErrCode.message == EDK1_INF_ERROR:
+ Logger.Warn("UPT",
+ ST.WRN_EDK1_INF_FOUND%Path)
+ # the INF cannot be parsed for [Packages], so assume the module does
+ # not block the operation either way
+ return True
+
+## GetPackagePath
+#
+# Get the dependency package paths from an INF file path
+#
+# @param InfPath: path of an INF file
+#
+def GetPackagePath(InfPath):
+ PackagePath = []
+ if os.path.exists(InfPath):
+ FindSection = False
+ for Line in open(InfPath).readlines():
+ Line = Line.strip()
+ if not Line:
+ continue
+ if Line.startswith('#'):
+ continue
+ if Line.startswith('[Packages') and Line.endswith(']'):
+ FindSection = True
+ continue
+ if Line.startswith('[') and Line.endswith(']') and FindSection:
+ break
+ if FindSection:
+ PackagePath.append(os.path.normpath(Line))
+
+ return PackagePath
+
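+# Illustrative sketch (hypothetical INF content): for an INF containing
+# [Packages]
+# MdePkg/MdePkg.dec
+# GetPackagePath(InfPath) returns ['MdePkg/MdePkg.dec'], with separators
+# normalized to the host platform by os.path.normpath.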
+## check whether a module depends on packages in DpPackagePathList that cannot be satisfied by OtherPkgList
+#
+# @param Path: a module path
+# @param DpPackagePathList: a list of Package Paths
+# @param OtherPkgList: a list of Package Information (Guid, Version)
+# @retval: False: the module depends on a package in DpPackagePathList that can not be satisfied by OtherPkgList
+# True: either the module does not depend on DpPackagePathList, or it does but
+# the dependency can be satisfied by OtherPkgList
+#
+def VerifyReplaceModuleDep(Path, DpPackagePathList, OtherPkgList):
+ try:
+ for Item in GetPackagePath(Path):
+ if Item in DpPackagePathList:
+ DecPath = os.path.normpath(os.path.join(GlobalData.gWORKSPACE, Item))
+ Name, Guid, Version = GetPkgInfoFromDec(DecPath)
+ if (Guid, Version) not in OtherPkgList:
+ Logger.Info(ST.MSG_MODULE_DEPEND_ON % (Path, DecPath))
+ return False
+ return True
+ except FatalError as ErrCode:
+ if ErrCode.message == EDK1_INF_ERROR:
+ Logger.Warn("UPT",
+ ST.WRN_EDK1_INF_FOUND%Path)
+ # the INF cannot be parsed for [Packages], so assume the module does
+ # not block the operation either way
+ return True
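+# Illustrative usage (hypothetical paths and guid):
+# VerifyReplaceModuleDep('Drv/Drv.inf', ['OldPkg/OldPkg.dec'],
+# [('NEW-PKG-GUID', '2.0')])
+# returns False only if Drv.inf lists OldPkg/OldPkg.dec in [Packages] and that
+# DEC's (Guid, Version) is absent from the replacement package list.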
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py
new file mode 100755
index 00000000..c9b3ccbc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py
@@ -0,0 +1,267 @@
+## @file
+# This file is used to define a class object to describe a distribution package
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+DistributionPackageClass
+'''
+
+##
+# Import Modules
+#
+import os.path
+
+from Library.Misc import Sdict
+from Library.Misc import GetNonMetaDataFiles
+from PomAdapter.InfPomAlignment import InfPomAlignment
+from PomAdapter.DecPomAlignment import DecPomAlignment
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import OPTION_VALUE_INVALID
+from Logger.ToolError import FatalError
+from Logger.ToolError import EDK1_INF_ERROR
+from Object.POM.CommonObject import IdentificationObject
+from Object.POM.CommonObject import CommonHeaderObject
+from Object.POM.CommonObject import MiscFileObject
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## DistributionPackageHeaderClass
+#
+# @param IdentificationObject: Identification Object
+# @param CommonHeaderObject: Common Header Object
+#
+class DistributionPackageHeaderObject(IdentificationObject, \
+ CommonHeaderObject):
+ def __init__(self):
+ IdentificationObject.__init__(self)
+ CommonHeaderObject.__init__(self)
+ self.ReadOnly = ''
+ self.RePackage = ''
+ self.Vendor = ''
+ self.Date = ''
+ self.Signature = 'Md5Sum'
+ self.XmlSpecification = ''
+
+ def GetReadOnly(self):
+ return self.ReadOnly
+
+ def SetReadOnly(self, ReadOnly):
+ self.ReadOnly = ReadOnly
+
+ def GetRePackage(self):
+ return self.RePackage
+
+ def SetRePackage(self, RePackage):
+ self.RePackage = RePackage
+
+ def GetVendor(self):
+ return self.Vendor
+
+ def SetDate(self, Date):
+ self.Date = Date
+
+ def GetDate(self):
+ return self.Date
+
+ def SetSignature(self, Signature):
+ self.Signature = Signature
+
+ def GetSignature(self):
+ return self.Signature
+
+ def SetXmlSpecification(self, XmlSpecification):
+ self.XmlSpecification = XmlSpecification
+
+ def GetXmlSpecification(self):
+ return self.XmlSpecification
+
+## DistributionPackageClass
+#
+# @param object: DistributionPackageClass
+#
+class DistributionPackageClass(object):
+ def __init__(self):
+ self.Header = DistributionPackageHeaderObject()
+ #
+ # {(Guid, Version, Path) : PackageObj}
+ #
+ self.PackageSurfaceArea = Sdict()
+ #
+ # {(Guid, Version, Name, Path) : ModuleObj}
+ #
+ self.ModuleSurfaceArea = Sdict()
+ self.Tools = MiscFileObject()
+ self.MiscellaneousFiles = MiscFileObject()
+ self.UserExtensions = []
+ self.FileList = []
+
+ ## Get all included packages and modules for a distribution package
+ #
+ # @param WorkspaceDir: WorkspaceDir
+ # @param PackageList: A list of all packages
+ # @param ModuleList: A list of all modules
+ #
+ def GetDistributionPackage(self, WorkspaceDir, PackageList, ModuleList):
+ # Backup WorkspaceDir
+ Root = WorkspaceDir
+
+ #
+ # Get Packages
+ #
+ if PackageList:
+ for PackageFile in PackageList:
+ PackageFileFullPath = mws.join(Root, PackageFile)
+ WorkspaceDir = mws.getWs(Root, PackageFile)
+ DecObj = DecPomAlignment(PackageFileFullPath, WorkspaceDir, CheckMulDec=True)
+ PackageObj = DecObj
+ #
+ # Parse the INF files one by one
+ #
+ ModuleInfFileList = PackageObj.GetModuleFileList()
+ for File in ModuleInfFileList:
+ WsRelPath = os.path.join(PackageObj.GetPackagePath(), File)
+ WsRelPath = os.path.normpath(WsRelPath)
+ if ModuleList and WsRelPath in ModuleList:
+ Logger.Error("UPT",
+ OPTION_VALUE_INVALID,
+ ST.ERR_NOT_STANDALONE_MODULE_ERROR%\
+ (WsRelPath, PackageFile))
+ Filename = os.path.normpath\
+ (os.path.join(PackageObj.GetRelaPath(), File))
+ #
+ # Call the INF parser to generate an Inf object.
+ # This is not a direct call; it is wrapped by the Inf class
+ # in InfPomAlignment.
+ #
+ try:
+ ModuleObj = InfPomAlignment(Filename, WorkspaceDir, PackageObj.GetPackagePath())
+
+ #
+ # Add module to package
+ #
+ ModuleDict = PackageObj.GetModuleDict()
+ ModuleDict[(ModuleObj.GetGuid(), \
+ ModuleObj.GetVersion(), \
+ ModuleObj.GetName(), \
+ ModuleObj.GetCombinePath())] = ModuleObj
+ PackageObj.SetModuleDict(ModuleDict)
+ except FatalError as ErrCode:
+ if ErrCode.message == EDK1_INF_ERROR:
+ Logger.Warn("UPT",
+ ST.WRN_EDK1_INF_FOUND%Filename)
+ else:
+ raise
+
+ self.PackageSurfaceArea\
+ [(PackageObj.GetGuid(), PackageObj.GetVersion(), \
+ PackageObj.GetCombinePath())] = PackageObj
+
+ #
+ # Get Modules
+ #
+ if ModuleList:
+ for ModuleFile in ModuleList:
+ ModuleFileFullPath = mws.join(Root, ModuleFile)
+ WorkspaceDir = mws.getWs(Root, ModuleFile)
+
+ try:
+ ModuleObj = InfPomAlignment(ModuleFileFullPath, WorkspaceDir)
+ ModuleKey = (ModuleObj.GetGuid(),
+ ModuleObj.GetVersion(),
+ ModuleObj.GetName(),
+ ModuleObj.GetCombinePath())
+ self.ModuleSurfaceArea[ModuleKey] = ModuleObj
+ except FatalError as ErrCode:
+ if ErrCode.message == EDK1_INF_ERROR:
+ Logger.Error("UPT",
+ EDK1_INF_ERROR,
+ ST.WRN_EDK1_INF_FOUND%ModuleFileFullPath,
+ ExtraData=ST.ERR_NOT_SUPPORTED_SA_MODULE)
+ else:
+ raise
+
+ # Recover WorkspaceDir
+ WorkspaceDir = Root
+
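+ # Illustrative key layout after this call (restating the dictionaries above):
+ # self.PackageSurfaceArea[(PkgGuid, PkgVersion, CombinePath)] -> DecPomAlignment
+ # self.ModuleSurfaceArea[(ModGuid, ModVersion, Name, CombinePath)] -> InfPomAlignment
+ # In-package modules are not stored here; they live in their owning
+ # package's ModuleDict.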
+ ## Get all files included in a distribution package, except distribution-level
+ # tool/misc files
+ #
+ # @retval DistFileList A list of non-metadata file paths, relative to the workspace
+ # @retval MetaDataFileList A list of metadata file paths, relative to the workspace
+ #
+ def GetDistributionFileList(self):
+ MetaDataFileList = []
+ SkipModulesUniList = []
+
+ for Guid, Version, Path in self.PackageSurfaceArea:
+ Package = self.PackageSurfaceArea[Guid, Version, Path]
+ PackagePath = Package.GetPackagePath()
+ FullPath = Package.GetFullPath()
+ MetaDataFileList.append(Path)
+ IncludePathList = Package.GetIncludePathList()
+ for IncludePath in IncludePathList:
+ SearchPath = os.path.normpath(os.path.join(os.path.dirname(FullPath), IncludePath))
+ AddPath = os.path.normpath(os.path.join(PackagePath, IncludePath))
+ self.FileList += GetNonMetaDataFiles(SearchPath, ['CVS', '.svn'], False, AddPath)
+ #
+ # Add the miscellaneous files on DEC file
+ #
+ for MiscFileObj in Package.GetMiscFileList():
+ for FileObj in MiscFileObj.GetFileList():
+ MiscFileFullPath = os.path.normpath(os.path.join(PackagePath, FileObj.GetURI()))
+ if MiscFileFullPath not in self.FileList:
+ self.FileList.append(MiscFileFullPath)
+
+ Module = None
+ ModuleDict = Package.GetModuleDict()
+ for Guid, Version, Name, Path in ModuleDict:
+ Module = ModuleDict[Guid, Version, Name, Path]
+ ModulePath = Module.GetModulePath()
+ FullPath = Module.GetFullPath()
+ PkgRelPath = os.path.normpath(os.path.join(PackagePath, ModulePath))
+ MetaDataFileList.append(Path)
+ SkipList = ['CVS', '.svn']
+ NonMetaDataFileList = []
+ if Module.UniFileClassObject:
+ for UniFile in Module.UniFileClassObject.IncFileList:
+ OriPath = os.path.normpath(os.path.dirname(FullPath))
+ UniFilePath = os.path.normpath(os.path.join(PkgRelPath, UniFile.Path[len(OriPath) + 1:]))
+ if UniFilePath not in SkipModulesUniList:
+ SkipModulesUniList.append(UniFilePath)
+ for IncludeFile in Module.UniFileClassObject.IncludePathList:
+ if IncludeFile not in SkipModulesUniList:
+ SkipModulesUniList.append(IncludeFile)
+ NonMetaDataFileList = GetNonMetaDataFiles(os.path.dirname(FullPath), SkipList, False, PkgRelPath)
+ for NonMetaDataFile in NonMetaDataFileList:
+ if NonMetaDataFile not in self.FileList:
+ self.FileList.append(NonMetaDataFile)
+ for Guid, Version, Name, Path in self.ModuleSurfaceArea:
+ Module = self.ModuleSurfaceArea[Guid, Version, Name, Path]
+ ModulePath = Module.GetModulePath()
+ FullPath = Module.GetFullPath()
+ MetaDataFileList.append(Path)
+ SkipList = ['CVS', '.svn']
+ NonMetaDataFileList = []
+ if Module.UniFileClassObject:
+ for UniFile in Module.UniFileClassObject.IncFileList:
+ OriPath = os.path.normpath(os.path.dirname(FullPath))
+ UniFilePath = os.path.normpath(os.path.join(ModulePath, UniFile.Path[len(OriPath) + 1:]))
+ if UniFilePath not in SkipModulesUniList:
+ SkipModulesUniList.append(UniFilePath)
+ NonMetaDataFileList = GetNonMetaDataFiles(os.path.dirname(FullPath), SkipList, False, ModulePath)
+ for NonMetaDataFile in NonMetaDataFileList:
+ if NonMetaDataFile not in self.FileList:
+ self.FileList.append(NonMetaDataFile)
+
+ for SkipModuleUni in SkipModulesUniList:
+ if SkipModuleUni in self.FileList:
+ self.FileList.remove(SkipModuleUni)
+
+ return self.FileList, MetaDataFileList
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/FileHook.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/FileHook.py
new file mode 100755
index 00000000..433b4e3b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/FileHook.py
@@ -0,0 +1,193 @@
+## @file
+# This file hooks file and directory creation and removal
+#
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+File hook
+'''
+
+import os
+import stat
+import time
+import zipfile
+from time import sleep
+from Library import GlobalData
+
+__built_in_remove__ = os.remove
+__built_in_mkdir__ = os.mkdir
+__built_in_rmdir__ = os.rmdir
+__built_in_chmod__ = os.chmod
+__built_in_open__ = open
+
+_RMFILE = 0
+_MKFILE = 1
+_RMDIR = 2
+_MKDIR = 3
+_CHMOD = 4
+
+gBACKUPFILE = 'file.backup'
+gEXCEPTION_LIST = ['Conf'+os.sep+'DistributionPackageDatabase.db', '.tmp', gBACKUPFILE]
+
+class _PathInfo:
+ def __init__(self, action, path, mode=-1):
+ self.action = action
+ self.path = path
+ self.mode = mode
+
+class RecoverMgr:
+ def __init__(self, workspace):
+ self.rlist = []
+ self.zip = None
+ self.workspace = os.path.normpath(workspace)
+ self.backupfile = gBACKUPFILE
+ self.zipfile = os.path.join(self.workspace, gBACKUPFILE)
+
+ def _createzip(self):
+ if self.zip:
+ return
+ self.zip = zipfile.ZipFile(self.zipfile, 'w', zipfile.ZIP_DEFLATED)
+
+ def _save(self, tmp, path):
+ if not self._tryhook(path):
+ return
+ self.rlist.append(_PathInfo(tmp, path))
+
+ def bkrmfile(self, path):
+ arc = self._tryhook(path)
+ if arc and os.path.isfile(path):
+ self._createzip()
+ # zipfile on Python 3 expects a str archive name, so pass it unencoded
+ self.zip.write(path, arc)
+ sta = os.stat(path)
+ oldmode = stat.S_IMODE(sta.st_mode)
+ self.rlist.append(_PathInfo(_CHMOD, path, oldmode))
+ self.rlist.append(_PathInfo(_RMFILE, path))
+ __built_in_remove__(path)
+
+ def bkmkfile(self, path, mode, bufsize):
+ if not os.path.exists(path):
+ self._save(_MKFILE, path)
+ return __built_in_open__(path, mode, bufsize)
+
+ def bkrmdir(self, path):
+ if os.path.exists(path):
+ sta = os.stat(path)
+ oldmode = stat.S_IMODE(sta.st_mode)
+ self.rlist.append(_PathInfo(_CHMOD, path, oldmode))
+ self._save(_RMDIR, path)
+ __built_in_rmdir__(path)
+
+ def bkmkdir(self, path, mode):
+ if not os.path.exists(path):
+ self._save(_MKDIR, path)
+ __built_in_mkdir__(path, mode)
+
+ def bkchmod(self, path, mode):
+ if self._tryhook(path) and os.path.exists(path):
+ sta = os.stat(path)
+ oldmode = stat.S_IMODE(sta.st_mode)
+ self.rlist.append(_PathInfo(_CHMOD, path, oldmode))
+ __built_in_chmod__(path, mode)
+
+ def rollback(self):
+ if self.zip:
+ self.zip.close()
+ self.zip = None
+ index = len(self.rlist) - 1
+ while index >= 0:
+ item = self.rlist[index]
+ exist = os.path.exists(item.path)
+ if item.action == _MKFILE and exist:
+ #if not os.access(item.path, os.W_OK):
+ # os.chmod(item.path, S_IWUSR)
+ __built_in_remove__(item.path)
+ elif item.action == _RMFILE and not exist:
+ if not self.zip:
+ self.zip = zipfile.ZipFile(self.zipfile, 'r', zipfile.ZIP_DEFLATED)
+ arcname = os.path.normpath(item.path)
+ # keep arcname as str: Python 3's zipfile stores and looks up
+ # member names as text, and os.sep is a str
+ arcname = arcname[len(self.workspace)+1:]
+ if os.sep != "/" and os.sep in arcname:
+ arcname = arcname.replace(os.sep, '/')
+ mtime = self.zip.getinfo(arcname).date_time
+ content = self.zip.read(arcname)
+ filep = __built_in_open__(item.path, "wb")
+ filep.write(content)
+ filep.close()
+ intime = time.mktime(mtime + (0, 0, 0))
+ os.utime(item.path, (intime, intime))
+ elif item.action == _MKDIR and exist:
+ while True:
+ try:
+ __built_in_rmdir__(item.path)
+ break
+ except IOError:
+ # Sleep a short time and try again
+ # The anti-virus software may delay the file removal in this directory
+ sleep(0.1)
+ elif item.action == _RMDIR and not exist:
+ __built_in_mkdir__(item.path)
+ elif item.action == _CHMOD and exist:
+ try:
+ __built_in_chmod__(item.path, item.mode)
+ except EnvironmentError:
+ pass
+ index -= 1
+ self.commit()
+
+ def commit(self):
+ if self.zip:
+ self.zip.close()
+ __built_in_remove__(self.zipfile)
+
+ # Check if path needs to be hooked
+ def _tryhook(self, path):
+ path = os.path.normpath(path)
+ works = self.workspace if str(self.workspace).endswith(os.sep) else (self.workspace + os.sep)
+ if not path.startswith(works):
+ return ''
+ for exceptdir in gEXCEPTION_LIST:
+ full = os.path.join(self.workspace, exceptdir)
+ if full == path or path.startswith(full + os.sep) or os.path.split(full)[0] == path:
+ return ''
+ return path[len(self.workspace)+1:]
+
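+# Illustrative semantics of _tryhook (hypothetical workspace 'C:\ws'):
+# _tryhook('C:\ws\MdePkg\X.dec') -> 'MdePkg\X.dec' (hooked, workspace-relative)
+# _tryhook('C:\other\X.dec') -> '' (outside the workspace, not hooked)
+# _tryhook('C:\ws\Conf\DistributionPackageDatabase.db') -> '' (exception list)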
+def _hookrm(path):
+ if GlobalData.gRECOVERMGR:
+ GlobalData.gRECOVERMGR.bkrmfile(path)
+ else:
+ __built_in_remove__(path)
+
+def _hookmkdir(path, mode=0o777):
+ if GlobalData.gRECOVERMGR:
+ GlobalData.gRECOVERMGR.bkmkdir(path, mode)
+ else:
+ __built_in_mkdir__(path, mode)
+
+def _hookrmdir(path):
+ if GlobalData.gRECOVERMGR:
+ GlobalData.gRECOVERMGR.bkrmdir(path)
+ else:
+ __built_in_rmdir__(path)
+
+def _hookmkfile(path, mode='r', bufsize=-1):
+ if GlobalData.gRECOVERMGR:
+ return GlobalData.gRECOVERMGR.bkmkfile(path, mode, bufsize)
+ return __built_in_open__(path, mode, bufsize)
+
+def _hookchmod(path, mode):
+ if GlobalData.gRECOVERMGR:
+ GlobalData.gRECOVERMGR.bkchmod(path, mode)
+ else:
+ __built_in_chmod__(path, mode)
+
+def SetRecoverMgr(mgr):
+ GlobalData.gRECOVERMGR = mgr
+
+os.remove = _hookrm
+os.mkdir = _hookmkdir
+os.rmdir = _hookrmdir
+os.chmod = _hookchmod
+__FileHookOpen__ = _hookmkfile
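+# Illustrative usage of the hooks installed above (hypothetical workspace):
+# SetRecoverMgr(RecoverMgr('C:\ws'))
+# ... os.remove / os.mkdir / os.rmdir / os.chmod inside C:\ws are now
+# ... journalled to 'file.backup' in the workspace
+# GlobalData.gRECOVERMGR.rollback() # undo the whole batch on failure
+# GlobalData.gRECOVERMGR.commit() # discard the backup after success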
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/IpiDb.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/IpiDb.py
new file mode 100755
index 00000000..7699a9c5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/IpiDb.py
@@ -0,0 +1,922 @@
+## @file
+# This file is for installed package information database operations
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+IpiDb
+'''
+
+##
+# Import Modules
+#
+import sqlite3
+import os.path
+import time
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import UPT_ALREADY_RUNNING_ERROR
+from Logger.ToolError import UPT_DB_UPDATE_ERROR
+import platform as pf
+
+## IpiDb
+#
+# This class represents the installed package information database
+# Add/Remove/Get installed distribution package information here.
+#
+#
+# @param object: Inherited from object class
+# @param DbPath: A string for the path of the database
+#
+#
+class IpiDatabase(object):
+ def __init__(self, DbPath, Workspace):
+ Dir = os.path.dirname(DbPath)
+ if not os.path.isdir(Dir):
+ os.mkdir(Dir)
+ self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED')
+ self.Conn.execute("PRAGMA page_size=4096")
+ self.Conn.execute("PRAGMA synchronous=OFF")
+ self.Cur = self.Conn.cursor()
+ self.DpTable = 'DpInfo'
+ self.PkgTable = 'PkgInfo'
+ self.ModInPkgTable = 'ModInPkgInfo'
+ self.StandaloneModTable = 'StandaloneModInfo'
+ self.ModDepexTable = 'ModDepexInfo'
+ self.DpFileListTable = 'DpFileListInfo'
+ self.DummyTable = 'Dummy'
+ self.Workspace = os.path.normpath(Workspace)
+
+ ## Initialize build database
+ #
+ #
+ def InitDatabase(self, SkipLock = False):
+ Logger.Verbose(ST.MSG_INIT_IPI_START)
+ if not SkipLock:
+ try:
+ #
+ # Create a dummy table, if already existed,
+ # then UPT is already running
+ #
+ SqlCommand = """
+ create table %s (
+ Dummy TEXT NOT NULL,
+ PRIMARY KEY (Dummy)
+ )""" % self.DummyTable
+ self.Cur.execute(SqlCommand)
+ self.Conn.commit()
+ except sqlite3.OperationalError:
+ Logger.Error("UPT",
+ UPT_ALREADY_RUNNING_ERROR,
+ ST.ERR_UPT_ALREADY_RUNNING_ERROR
+ )
+
+ #
+ # Create new table
+ #
+ SqlCommand = """
+ create table IF NOT EXISTS %s (
+ DpGuid TEXT NOT NULL,DpVersion TEXT NOT NULL,
+ InstallTime REAL NOT NULL,
+ NewPkgFileName TEXT NOT NULL,
+ PkgFileName TEXT NOT NULL,
+ RePackage TEXT NOT NULL,
+ PRIMARY KEY (DpGuid, DpVersion)
+ )""" % self.DpTable
+ self.Cur.execute(SqlCommand)
+
+ SqlCommand = """
+ create table IF NOT EXISTS %s (
+ FilePath TEXT NOT NULL,
+ DpGuid TEXT,
+ DpVersion TEXT,
+ Md5Sum TEXT,
+ PRIMARY KEY (FilePath)
+ )""" % self.DpFileListTable
+ self.Cur.execute(SqlCommand)
+
+ SqlCommand = """
+ create table IF NOT EXISTS %s (
+ PackageGuid TEXT NOT NULL,
+ PackageVersion TEXT NOT NULL,
+ InstallTime REAL NOT NULL,
+ DpGuid TEXT,
+ DpVersion TEXT,
+ InstallPath TEXT NOT NULL,
+ PRIMARY KEY (PackageGuid, PackageVersion, InstallPath)
+ )""" % self.PkgTable
+ self.Cur.execute(SqlCommand)
+
+ SqlCommand = """
+ create table IF NOT EXISTS %s (
+ ModuleGuid TEXT NOT NULL,
+ ModuleVersion TEXT NOT NULL,
+ ModuleName TEXT NOT NULL,
+ InstallTime REAL NOT NULL,
+ PackageGuid TEXT,
+ PackageVersion TEXT,
+ InstallPath TEXT NOT NULL,
+ PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
+ )""" % self.ModInPkgTable
+ self.Cur.execute(SqlCommand)
+
+ SqlCommand = """
+ create table IF NOT EXISTS %s (
+ ModuleGuid TEXT NOT NULL,
+ ModuleVersion TEXT NOT NULL,
+ ModuleName TEXT NOT NULL,
+ InstallTime REAL NOT NULL,
+ DpGuid TEXT,
+ DpVersion TEXT,
+ InstallPath TEXT NOT NULL,
+ PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
+ )""" % self.StandaloneModTable
+ self.Cur.execute(SqlCommand)
+
+ SqlCommand = """
+ create table IF NOT EXISTS %s (
+ ModuleGuid TEXT NOT NULL,
+ ModuleVersion TEXT NOT NULL,
+ ModuleName TEXT NOT NULL,
+ InstallPath TEXT NOT NULL,
+ DepexGuid TEXT,
+ DepexVersion TEXT
+ )""" % self.ModDepexTable
+ self.Cur.execute(SqlCommand)
+
+ self.Conn.commit()
+
+ Logger.Verbose(ST.MSG_INIT_IPI_FINISH)
+
+ def RollBack(self):
+ self.Conn.rollback()
+
+ def Commit(self):
+ self.Conn.commit()
+
+ ## Add a distribution's install information from a DpObj
+ #
+ # @param DpObj: A distribution object
+ # @param NewDpPkgFileName: the file name under which the DP is saved
+ # @param DpPkgFileName: the original DP file name
+ # @param RePackage: whether the DP may be repackaged
+ #
+ def AddDPObject(self, DpObj, NewDpPkgFileName, DpPkgFileName, RePackage):
+ try:
+ for PkgKey in DpObj.PackageSurfaceArea.keys():
+ PkgGuid = PkgKey[0]
+ PkgVersion = PkgKey[1]
+ PkgInstallPath = PkgKey[2]
+ self._AddPackage(PkgGuid, PkgVersion, DpObj.Header.GetGuid(), \
+ DpObj.Header.GetVersion(), PkgInstallPath)
+ PkgObj = DpObj.PackageSurfaceArea[PkgKey]
+ for ModKey in PkgObj.GetModuleDict().keys():
+ ModGuid = ModKey[0]
+ ModVersion = ModKey[1]
+ ModName = ModKey[2]
+ ModInstallPath = ModKey[3]
+ ModInstallPath = \
+ os.path.normpath(os.path.join(PkgInstallPath, ModInstallPath))
+ self._AddModuleInPackage(ModGuid, ModVersion, ModName, PkgGuid, \
+ PkgVersion, ModInstallPath)
+ ModObj = PkgObj.GetModuleDict()[ModKey]
+ for Dep in ModObj.GetPackageDependencyList():
+ DepexGuid = Dep.GetGuid()
+ DepexVersion = Dep.GetVersion()
+ self._AddModuleDepex(ModGuid, ModVersion, ModName, ModInstallPath, \
+ DepexGuid, DepexVersion)
+ for (FilePath, Md5Sum) in PkgObj.FileList:
+ self._AddDpFilePathList(DpObj.Header.GetGuid(), \
+ DpObj.Header.GetVersion(), FilePath, \
+ Md5Sum)
+
+ for ModKey in DpObj.ModuleSurfaceArea.keys():
+ ModGuid = ModKey[0]
+ ModVersion = ModKey[1]
+ ModName = ModKey[2]
+ ModInstallPath = ModKey[3]
+ self._AddStandaloneModule(ModGuid, ModVersion, ModName, \
+ DpObj.Header.GetGuid(), \
+ DpObj.Header.GetVersion(), \
+ ModInstallPath)
+ ModObj = DpObj.ModuleSurfaceArea[ModKey]
+ for Dep in ModObj.GetPackageDependencyList():
+ DepexGuid = Dep.GetGuid()
+ DepexVersion = Dep.GetVersion()
+ self._AddModuleDepex(ModGuid, ModVersion, ModName, ModInstallPath, \
+ DepexGuid, DepexVersion)
+ for (Path, Md5Sum) in ModObj.FileList:
+ self._AddDpFilePathList(DpObj.Header.GetGuid(), \
+ DpObj.Header.GetVersion(), \
+ Path, Md5Sum)
+
+ #
+ # add tool/misc files
+ #
+ for (Path, Md5Sum) in DpObj.FileList:
+ self._AddDpFilePathList(DpObj.Header.GetGuid(), \
+ DpObj.Header.GetVersion(), Path, Md5Sum)
+
+ self._AddDp(DpObj.Header.GetGuid(), DpObj.Header.GetVersion(), \
+ NewDpPkgFileName, DpPkgFileName, RePackage)
+
+ except sqlite3.IntegrityError as DetailMsg:
+ Logger.Error("UPT",
+ UPT_DB_UPDATE_ERROR,
+ ST.ERR_UPT_DB_UPDATE_ERROR,
+ ExtraData = DetailMsg
+ )
+
+ ## Add a distribution install information
+ #
+ # @param Guid Guid of the distribution package
+ # @param Version Version of the distribution package
+ # @param NewDpFileName the saved filename of distribution package file
+ # @param DistributionFileName the filename of distribution package file
+ #
+ def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
+ RePackage):
+
+ if Version is None or len(Version.strip()) == 0:
+ Version = 'N/A'
+
+ #
+ # Add newly installed DP information to DB.
+ #
+ if NewDpFileName is None or len(NewDpFileName.strip()) == 0:
+ PkgFileName = 'N/A'
+ else:
+ PkgFileName = NewDpFileName
+ CurrentTime = time.time()
+ SqlCommand = \
+ """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \
+ (self.DpTable, Guid, Version, CurrentTime, PkgFileName, \
+ DistributionFileName, str(RePackage).upper())
+ self.Cur.execute(SqlCommand)
+
+
+ ## Add a file list from a DP
+ #
+ # @param DpGuid: A DpGuid
+ # @param DpVersion: A DpVersion
+ # @param Path: A Path
+ # @param Md5Sum: The file's Md5Sum
+ #
+ def _AddDpFilePathList(self, DpGuid, DpVersion, Path, Md5Sum):
+ Path = os.path.normpath(Path)
+ if pf.system() == 'Windows':
+ if Path.startswith(self.Workspace):
+ Path = Path[len(self.Workspace):]
+ else:
+ if Path.startswith(self.Workspace + os.sep):
+ Path = Path[len(self.Workspace)+1:]
+ SqlCommand = """insert into %s values('%s', '%s', '%s', '%s')""" % \
+ (self.DpFileListTable, Path, DpGuid, DpVersion, Md5Sum)
+
+ self.Cur.execute(SqlCommand)
+
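+ # Illustrative path trimming (hypothetical paths): with Workspace 'C:\ws' on
+ # Windows, 'C:\ws\MdePkg\X.dec' is stored as '\MdePkg\X.dec'; on POSIX with
+ # Workspace '/ws', '/ws/MdePkg/X.dec' is stored as 'MdePkg/X.dec' (the
+ # Windows branch keeps the leading separator).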
+ ## Add a package install information
+ #
+ # @param Guid: A package guid
+ # @param Version: A package version
+ # @param DpGuid: A DpGuid
+ # @param DpVersion: A DpVersion
+ # @param Path: A Path
+ #
+ def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
+
+ if Version is None or len(Version.strip()) == 0:
+ Version = 'N/A'
+
+ if DpGuid is None or len(DpGuid.strip()) == 0:
+ DpGuid = 'N/A'
+
+ if DpVersion is None or len(DpVersion.strip()) == 0:
+ DpVersion = 'N/A'
+
+ #
+ # Add newly installed package information to DB.
+ #
+ CurrentTime = time.time()
+ SqlCommand = \
+ """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \
+ (self.PkgTable, Guid, Version, CurrentTime, DpGuid, DpVersion, Path)
+ self.Cur.execute(SqlCommand)
+
+ ## Add a module that from a package install information
+ #
+ # @param Guid: Module Guid
+ # @param Version: Module version
+ # @param Name: Module Name
+ # @param PkgGuid: Package Guid
+ # @param PkgVersion: Package version
+ # @param Path: Package relative path that module installs
+ #
+ def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
+ PkgVersion=None, Path=''):
+
+ if Version is None or len(Version.strip()) == 0:
+ Version = 'N/A'
+
+ if PkgGuid is None or len(PkgGuid.strip()) == 0:
+ PkgGuid = 'N/A'
+
+ if PkgVersion is None or len(PkgVersion.strip()) == 0:
+ PkgVersion = 'N/A'
+
+ if os.name == 'posix':
+ Path = Path.replace('\\', os.sep)
+ else:
+ Path = Path.replace('/', os.sep)
+
+ #
+ # Add module from package information to DB.
+ #
+ CurrentTime = time.time()
+ SqlCommand = \
+ """insert into %s values('%s', '%s', '%s', %s, '%s', '%s', '%s')""" % \
+ (self.ModInPkgTable, Guid, Version, Name, CurrentTime, PkgGuid, PkgVersion, \
+ Path)
+ self.Cur.execute(SqlCommand)
+
+ ## Add a module that is standalone install information
+ #
+ # @param Guid: a module Guid
+ # @param Version: a module Version
+ # @param Name: a module name
+ # @param DpGuid: a DpGuid
+ # @param DpVersion: a DpVersion
+ # @param Path: path
+ #
+ def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
+ DpVersion=None, Path=''):
+
+ if Version is None or len(Version.strip()) == 0:
+ Version = 'N/A'
+
+ if DpGuid is None or len(DpGuid.strip()) == 0:
+ DpGuid = 'N/A'
+
+ if DpVersion is None or len(DpVersion.strip()) == 0:
+ DpVersion = 'N/A'
+
+ #
+ # Add module standalone information to DB.
+ #
+ CurrentTime = time.time()
+ SqlCommand = \
+ """insert into %s values('%s', '%s', '%s', %s, '%s', '%s', '%s')""" % \
+ (self.StandaloneModTable, Guid, Version, Name, CurrentTime, DpGuid, \
+ DpVersion, Path)
+ self.Cur.execute(SqlCommand)
+
+ ## Add a module depex
+ #
+ # @param Guid: a module Guid
+ # @param Version: a module Version
+ # @param Name: a module name
+ # @param DepexGuid: a module DepexGuid
+ # @param DepexVersion: a module DepexVersion
+ #
+ def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
+ DepexVersion=None):
+
+ if DepexGuid is None or len(DepexGuid.strip()) == 0:
+ DepexGuid = 'N/A'
+
+ if DepexVersion is None or len(DepexVersion.strip()) == 0:
+ DepexVersion = 'N/A'
+
+ if os.name == 'posix':
+ Path = Path.replace('\\', os.sep)
+ else:
+ Path = Path.replace('/', os.sep)
+
+ #
+ # Add module depex information to DB.
+ #
+ SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s', '%s')"""\
+ % (self.ModDepexTable, Guid, Version, Name, Path, DepexGuid, DepexVersion)
+ self.Cur.execute(SqlCommand)
+
+ ## Remove the install information of a distribution
+ #
+ # @param DpGuid: guid of the distribution package
+ # @param DpVersion: version of the distribution package
+ #
+ def RemoveDpObj(self, DpGuid, DpVersion):
+
+ PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
+ #
+ # delete from ModDepexInfo the dependencies of standalone modules
+ #
+ SqlCommand = \
+ """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
+ (select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s'
+ and B.DpVersion = '%s')
+ and ModDepexInfo.ModuleVersion in
+ (select ModuleVersion from StandaloneModInfo as B
+ where B.DpGuid = '%s' and B.DpVersion = '%s')
+ and ModDepexInfo.ModuleName in
+ (select ModuleName from StandaloneModInfo as B
+ where B.DpGuid = '%s' and B.DpVersion = '%s')
+ and ModDepexInfo.InstallPath in
+ (select InstallPath from StandaloneModInfo as B
+ where B.DpGuid = '%s' and B.DpVersion = '%s') """ % \
+ (DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)
+
+ self.Cur.execute(SqlCommand)
+ #
+ # delete from ModDepexInfo the dependencies of modules that come from packages
+ #
+ for Pkg in PkgList:
+
+ SqlCommand = \
+ """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
+ (select ModuleGuid from ModInPkgInfo
+ where ModInPkgInfo.PackageGuid ='%s' and
+ ModInPkgInfo.PackageVersion = '%s')
+ and ModDepexInfo.ModuleVersion in
+ (select ModuleVersion from ModInPkgInfo
+ where ModInPkgInfo.PackageGuid ='%s' and
+ ModInPkgInfo.PackageVersion = '%s')
+ and ModDepexInfo.ModuleName in
+ (select ModuleName from ModInPkgInfo
+ where ModInPkgInfo.PackageGuid ='%s' and
+ ModInPkgInfo.PackageVersion = '%s')
+ and ModDepexInfo.InstallPath in
+ (select InstallPath from ModInPkgInfo where
+ ModInPkgInfo.PackageGuid ='%s'
+ and ModInPkgInfo.PackageVersion = '%s')""" \
+ % (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1])
+
+ self.Cur.execute(SqlCommand)
+ #
+ # delete the standalone modules
+ #
+ SqlCommand = \
+ """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
+ (self.StandaloneModTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+ #
+ # delete the modules that come from packages
+ #
+ for Pkg in PkgList:
+ SqlCommand = \
+ """delete from %s where %s.PackageGuid ='%s'
+ and %s.PackageVersion = '%s'""" % \
+ (self.ModInPkgTable, self.ModInPkgTable, Pkg[0], \
+ self.ModInPkgTable, Pkg[1])
+ self.Cur.execute(SqlCommand)
+ #
+ # delete packages
+ #
+ SqlCommand = \
+ """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
+ (self.PkgTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+ #
+ # delete file list from DP
+ #
+ SqlCommand = \
+ """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
+ (self.DpFileListTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+ #
+ # delete DP
+ #
+ SqlCommand = \
+ """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
+ (self.DpTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ #self.Conn.commit()
+
+ ## Get a list of distribution install information.
+ #
+ # @param Guid: distribution package guid
+ # @param Version: distribution package version
+ #
+ def GetDp(self, Guid, Version):
+
+ if Version is None or len(Version.strip()) == 0:
+ Version = 'N/A'
+ Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
+ (DpGuid, DpVersion) = (Guid, Version)
+ SqlCommand = """select * from %s where DpGuid ='%s'""" % \
+ (self.DpTable, DpGuid)
+ self.Cur.execute(SqlCommand)
+
+ else:
+ Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_START)
+ (DpGuid, DpVersion) = (Guid, Version)
+ SqlCommand = \
+ """select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
+ (self.DpTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ DpList = []
+ for DpInfo in self.Cur:
+ DpGuid = DpInfo[0]
+ DpVersion = DpInfo[1]
+ InstallTime = DpInfo[2]
+ PkgFileName = DpInfo[3]
+ DpList.append((DpGuid, DpVersion, InstallTime, PkgFileName))
+
+ Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_FINISH)
+ return DpList
+
+ ## Get a list of distribution install dirs
+ #
+ # @param Guid: distribution package guid
+ # @param Version: distribution package version
+ #
+ def GetDpInstallDirList(self, Guid, Version):
+ SqlCommand = """select InstallPath from PkgInfo where DpGuid = '%s' and DpVersion = '%s'""" % (Guid, Version)
+ self.Cur.execute(SqlCommand)
+ DirList = []
+ for Result in self.Cur:
+ if Result[0] not in DirList:
+ DirList.append(Result[0])
+
+ SqlCommand = """select InstallPath from StandaloneModInfo where DpGuid = '%s' and DpVersion = '%s'""" % \
+ (Guid, Version)
+ self.Cur.execute(SqlCommand)
+ for Result in self.Cur:
+ if Result[0] not in DirList:
+ DirList.append(Result[0])
+
+ return DirList
+
+
+ ## Get a list of distribution install file path information.
+ #
+ # @param Guid: distribution package guid
+ # @param Version: distribution package version
+ #
+ def GetDpFileList(self, Guid, Version):
+
+ (DpGuid, DpVersion) = (Guid, Version)
+ SqlCommand = \
+ """select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
+ (self.DpFileListTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ PathList = []
+ for Result in self.Cur:
+ Path = Result[0]
+ Md5Sum = Result[3]
+ PathList.append((os.path.join(self.Workspace, Path), Md5Sum))
+
+ return PathList
+
+ ## Get the repackage attribute of the files installed into the current workspace
+ #
+ # @retval FileDict: a dict of files; the key is a file path, the value is (DpGuid, DpVersion, NewDpFileName, RePackage)
+ #
+ def GetRePkgDict(self):
+ SqlCommand = """select * from %s """ % (self.DpTable)
+ self.Cur.execute(SqlCommand)
+
+ DpInfoList = []
+ for Result in self.Cur:
+ DpInfoList.append(Result)
+
+ FileDict = {}
+ for Result in DpInfoList:
+ DpGuid = Result[0]
+ DpVersion = Result[1]
+ NewDpFileName = Result[3]
+ RePackage = Result[5]
+ if RePackage == 'TRUE':
+ RePackage = True
+ else:
+ RePackage = False
+ for FileInfo in self.GetDpFileList(DpGuid, DpVersion):
+ PathInfo = FileInfo[0]
+ FileDict[PathInfo] = DpGuid, DpVersion, NewDpFileName, RePackage
+
+ return FileDict
+
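+ # Illustrative result shape (hypothetical values):
+ # {'C:\ws\MdePkg\X.dec': ('DP-GUID', '1.0', 'DP-GUID_1.0.dist', True), ...}
+ # keyed by absolute file path, with RePackage already converted to a bool.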
+ ## Get (Guid, Version) from distribution file name information.
+ #
+ # @param DistributionFile: Distribution File
+ #
+ def GetDpByName(self, DistributionFile):
+ SqlCommand = """select * from %s where NewPkgFileName = '%s'""" % \
+ (self.DpTable, DistributionFile)
+ self.Cur.execute(SqlCommand)
+
+ for Result in self.Cur:
+ DpGuid = Result[0]
+ DpVersion = Result[1]
+ NewDpFileName = Result[3]
+
+ return (DpGuid, DpVersion, NewDpFileName)
+ else:
+ return (None, None, None)
+
+ ## Get a list of package information.
+ #
+ # @param Guid: package guid
+ # @param Version: package version
+ #
+ def GetPackage(self, Guid, Version, DpGuid='', DpVersion=''):
+
+ if DpVersion == '' or DpGuid == '':
+
+ (PackageGuid, PackageVersion) = (Guid, Version)
+ SqlCommand = """select * from %s where PackageGuid ='%s'
+ and PackageVersion = '%s'""" % (self.PkgTable, PackageGuid, \
+ PackageVersion)
+ self.Cur.execute(SqlCommand)
+
+ elif Version is None or len(Version.strip()) == 0:
+
+ SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
+ (self.PkgTable, Guid)
+ self.Cur.execute(SqlCommand)
+ else:
+ (PackageGuid, PackageVersion) = (Guid, Version)
+ SqlCommand = """select * from %s where PackageGuid ='%s' and
+ PackageVersion = '%s'
+ and DpGuid = '%s' and DpVersion = '%s'""" % \
+ (self.PkgTable, PackageGuid, PackageVersion, \
+ DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ PkgList = []
+ for PkgInfo in self.Cur:
+ PkgGuid = PkgInfo[0]
+ PkgVersion = PkgInfo[1]
+ InstallTime = PkgInfo[2]
+ InstallPath = PkgInfo[5]
+ PkgList.append((PkgGuid, PkgVersion, InstallTime, DpGuid, \
+ DpVersion, InstallPath))
+
+ return PkgList
+
+
+ ## Get a list of module-in-package information.
+ #
+ # @param Guid: A module guid
+ # @param Version: A module version
+ # @param Name: A module name
+ # @param Path: A module install path
+ #
+ def GetModInPackage(self, Guid, Version, Name, Path, PkgGuid='', PkgVersion=''):
+ (ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
+ if PkgVersion == '' or PkgGuid == '':
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s'
+ and ModuleName = '%s'""" % (self.ModInPkgTable, ModuleGuid, \
+ ModuleVersion, InstallPath, ModuleName)
+ self.Cur.execute(SqlCommand)
+ else:
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s'
+ and ModuleName = '%s' and PackageGuid ='%s'
+ and PackageVersion = '%s'
+ """ % (self.ModInPkgTable, ModuleGuid, \
+ ModuleVersion, InstallPath, ModuleName, PkgGuid, PkgVersion)
+ self.Cur.execute(SqlCommand)
+
+ ModList = []
+ for ModInfo in self.Cur:
+ ModGuid = ModInfo[0]
+ ModVersion = ModInfo[1]
+ InstallTime = ModInfo[2]
+ InstallPath = ModInfo[5]
+ ModList.append((ModGuid, ModVersion, InstallTime, PkgGuid, \
+ PkgVersion, InstallPath))
+
+ return ModList
+
+ ## Get a list of standalone module information.
+ #
+ # @param Guid: A module guid
+ # @param Version: A module version
+ # @param Name: A module name
+ # @param Path: A module install path
+ #
+ def GetStandaloneModule(self, Guid, Version, Name, Path, DpGuid='', DpVersion=''):
+ (ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
+ if DpGuid == '':
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s'
+ and ModuleName = '%s'""" % (self.StandaloneModTable, ModuleGuid, \
+ ModuleVersion, InstallPath, ModuleName)
+ self.Cur.execute(SqlCommand)
+
+ else:
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s'
+ """ % (self.StandaloneModTable, ModuleGuid, \
+ ModuleVersion, ModuleName, InstallPath, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ ModList = []
+ for ModInfo in self.Cur:
+ ModGuid = ModInfo[0]
+ ModVersion = ModInfo[1]
+ InstallTime = ModInfo[2]
+ InstallPath = ModInfo[5]
+ ModList.append((ModGuid, ModVersion, InstallTime, DpGuid, \
+ DpVersion, InstallPath))
+
+ return ModList
+
+ ## Get a list of standalone-module install paths that come from a DP.
+ #
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
+ #
+ def GetSModInsPathListFromDp(self, DpGuid, DpVersion):
+
+ PathList = []
+ SqlCommand = """select InstallPath from %s where DpGuid ='%s'
+ and DpVersion = '%s'
+ """ % (self.StandaloneModTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ for Result in self.Cur:
+ InstallPath = Result[0]
+ PathList.append(InstallPath)
+
+ return PathList
+
+ ## Get a list of package information.
+ #
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
+ #
+ def GetPackageListFromDp(self, DpGuid, DpVersion):
+
+ SqlCommand = """select * from %s where DpGuid ='%s' and
+ DpVersion = '%s' """ % (self.PkgTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+
+ PkgList = []
+ for PkgInfo in self.Cur:
+ PkgGuid = PkgInfo[0]
+ PkgVersion = PkgInfo[1]
+ InstallPath = PkgInfo[5]
+ PkgList.append((PkgGuid, PkgVersion, InstallPath))
+
+ return PkgList
+
+ ## Get a list of modules that depend on the packages from a DP.
+ #
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
+ #
+ def GetDpDependentModuleList(self, DpGuid, DpVersion):
+
+ ModList = []
+ PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
+ #
+ # if the DP contains no packages, nothing can depend on them
+ #
+ if len(PkgList) == 0:
+ return ModList
+
+ for Pkg in PkgList:
+ #
+ # get all in-package modules that depend on the current
+ # Pkg (Guid match; Version match or N/A) but do not belong
+ # to the current Pkg
+ #
+ SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion,
+ t1.InstallPath from %s as t1, %s as t2 where
+ t1.ModuleGuid = t2.ModuleGuid and
+ t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
+ and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
+ t1.PackageGuid != '%s' and t1.PackageVersion != '%s'
+ """ % (self.ModInPkgTable, \
+ self.ModDepexTable, Pkg[0], Pkg[1], Pkg[0], \
+ Pkg[1])
+ self.Cur.execute(SqlCommand)
+ for ModInfo in self.Cur:
+ ModGuid = ModInfo[0]
+ ModVersion = ModInfo[1]
+ InstallPath = ModInfo[2]
+ ModList.append((ModGuid, ModVersion, InstallPath))
+
+ #
+ # get all standalone modules that depend on the current
+ # Pkg (Guid match; Version match or N/A) but are not in the current DP
+ #
+ SqlCommand = \
+ """select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
+ from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
+ t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
+ and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
+ t1.DpGuid != '%s' and t1.DpVersion != '%s'
+ """ % \
+ (self.StandaloneModTable, self.ModDepexTable, Pkg[0], \
+ Pkg[1], DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+ for ModInfo in self.Cur:
+ ModGuid = ModInfo[0]
+ ModVersion = ModInfo[1]
+ InstallPath = ModInfo[2]
+ ModList.append((ModGuid, ModVersion, InstallPath))
+
+
+ return ModList
+
+ ## Get a DP's list of modules.
+ #
+ # @param DpGuid: A Distribution Guid
+ # @param DpVersion: A Distribution version
+ #
+ def GetDpModuleList(self, DpGuid, DpVersion):
+ ModList = []
+ #
+ # get Dp module list from the DpFileList table
+ #
+ SqlCommand = """select FilePath
+ from %s
+ where DpGuid = '%s' and DpVersion = '%s' and
+ FilePath like '%%.inf'
+ """ % (self.DpFileListTable, DpGuid, DpVersion)
+ self.Cur.execute(SqlCommand)
+ for ModuleInfo in self.Cur:
+ FilePath = ModuleInfo[0]
+ ModList.append(os.path.join(self.Workspace, FilePath))
+
+ return ModList
+
+
+ ## Get a module's depex
+ #
+ # @param Guid: A module Guid
+ # @param Version: A module version
+ # @param Path: A module install path
+ #
+ def GetModuleDepex(self, Guid, Version, Path):
+
+ #
+ # Get module depex information from the DB.
+ #
+ SqlCommand = """select * from %s where ModuleGuid ='%s' and
+ ModuleVersion = '%s' and InstallPath ='%s'
+ """ % (self.ModDepexTable, Guid, Version, Path)
+ self.Cur.execute(SqlCommand)
+
+
+ DepexList = []
+ for DepInfo in self.Cur:
+ DepexGuid = DepInfo[3]
+ DepexVersion = DepInfo[4]
+ DepexList.append((DepexGuid, DepexVersion))
+
+ return DepexList
+
+ ## Inventory the distributions installed into the current workspace
+ #
+ # @retval DpInfoList: a list of (DpGuid, DpVersion, DpFileName, DpAliasName) tuples
+ #
+ def InventoryDistInstalled(self):
+ SqlCommand = """select * from %s """ % (self.DpTable)
+ self.Cur.execute(SqlCommand)
+
+ DpInfoList = []
+ for Result in self.Cur:
+ DpGuid = Result[0]
+ DpVersion = Result[1]
+ DpAliasName = Result[3]
+ DpFileName = Result[4]
+ DpInfoList.append((DpGuid, DpVersion, DpFileName, DpAliasName))
+
+ return DpInfoList
+
+ ## Close entire database
+ #
+ # Close the connection and cursor
+ #
+ def CloseDb(self):
+ #
+ # drop the dummy table
+ #
+ SqlCommand = """
+ drop table IF EXISTS %s
+ """ % self.DummyTable
+ self.Cur.execute(SqlCommand)
+ self.Conn.commit()
+
+ self.Cur.close()
+ self.Conn.close()
+
+ ## Convert To Sql String
+ #
+ # 1. Replace "'" with "''" in each item of StringList
+ #
+ # @param StringList: A list for strings to be converted
+ #
+ def __ConvertToSqlString(self, StringList):
+ if self.DpTable:
+ pass
+ return list(map(lambda s: s.replace("'", "''"), StringList))
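+ # Illustrative behavior (hypothetical input):
+ # self.__ConvertToSqlString(["O'Brien", 'plain']) -> ["O''Brien", 'plain']
+ # Doubling the single quote is the standard SQL escape inside string literals.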
+
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/PackageFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/PackageFile.py
new file mode 100755
index 00000000..c45ca7d7
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/PackageFile.py
@@ -0,0 +1,250 @@
+## @file
+#
+# PackageFile class represents the zip file of a distribution package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+PackageFile
+'''
+
+##
+# Import Modules
+#
+import os.path
+import zipfile
+import tempfile
+import platform
+
+from Logger.ToolError import FILE_OPEN_FAILURE
+from Logger.ToolError import FILE_CHECKSUM_FAILURE
+from Logger.ToolError import FILE_NOT_FOUND
+from Logger.ToolError import FILE_DECOMPRESS_FAILURE
+from Logger.ToolError import FILE_UNKNOWN_ERROR
+from Logger.ToolError import FILE_WRITE_FAILURE
+from Logger.ToolError import FILE_COMPRESS_FAILURE
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Library.Misc import CreateDirectory
+from Library.Misc import RemoveDirectory
+from Core.FileHook import __FileHookOpen__
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+
+class PackageFile:
+ def __init__(self, FileName, Mode="r"):
+ self._FileName = FileName
+ if Mode not in ["r", "w", "a"]:
+ Mode = "r"
+ try:
+ self._ZipFile = zipfile.ZipFile(FileName, Mode, \
+ zipfile.ZIP_DEFLATED)
+ self._Files = {}
+ for Filename in self._ZipFile.namelist():
+ self._Files[os.path.normpath(Filename)] = Filename
+ except BaseException as Xstr:
+ Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
+ ExtraData="%s (%s)" % (FileName, str(Xstr)))
+
+ BadFile = self._ZipFile.testzip()
+ if BadFile is not None:
+ Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
+ ExtraData="[%s] in %s" % (BadFile, FileName))
+
+ def GetZipFile(self):
+ return self._ZipFile
+
+ ## Get file name
+ #
+ def __str__(self):
+ return self._FileName
+
+ ## Extract all files in the package
+ #
+ # @param ToDest: the destination directory
+ #
+ def Unpack(self, ToDest):
+ for FileN in self._ZipFile.namelist():
+ ToFile = os.path.normpath(os.path.join(ToDest, FileN))
+ Msg = "%s -> %s" % (FileN, ToFile)
+ Logger.Info(Msg)
+ self.Extract(FileN, ToFile)
+
+ ## Extract the file
+ #
+ # @param File: the extracted file
+ # @param ToFile: the destination file
+ #
+ def UnpackFile(self, File, ToFile):
+ File = File.replace('\\', '/')
+ if File in self._ZipFile.namelist():
+ Msg = "%s -> %s" % (File, ToFile)
+ Logger.Info(Msg)
+ self.Extract(File, ToFile)
+ return ToFile
+
+ return ''
+
+ ## Extract the file
+ #
+ # @param Which: the source path
+ # @param ToDest: the destination path
+ #
+ def Extract(self, Which, ToDest):
+ Which = os.path.normpath(Which)
+ if Which not in self._Files:
+ Logger.Error("PackagingTool", FILE_NOT_FOUND,
+ ExtraData="[%s] in %s" % (Which, self._FileName))
+ try:
+ FileContent = self._ZipFile.read(self._Files[Which])
+ except BaseException as Xstr:
+ Logger.Error("PackagingTool", FILE_DECOMPRESS_FAILURE,
+ ExtraData="[%s] in %s (%s)" % (Which, \
+ self._FileName, \
+ str(Xstr)))
+ try:
+ CreateDirectory(os.path.dirname(ToDest))
+ if os.path.exists(ToDest) and not os.access(ToDest, os.W_OK):
+ Logger.Warn("PackagingTool", \
+ ST.WRN_FILE_NOT_OVERWRITTEN % ToDest)
+ return
+ else:
+ ToFile = __FileHookOpen__(ToDest, 'wb')
+ except BaseException as Xstr:
+ Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
+ ExtraData="%s (%s)" % (ToDest, str(Xstr)))
+
+ try:
+ ToFile.write(FileContent)
+ ToFile.close()
+ except BaseException as Xstr:
+ Logger.Error("PackagingTool", FILE_WRITE_FAILURE,
+ ExtraData="%s (%s)" % (ToDest, str(Xstr)))
+
+ ## Remove the file
+ #
+ # @param Files: the removed files
+ #
+ def Remove(self, Files):
+ TmpDir = os.path.join(tempfile.gettempdir(), ".packaging")
+ if os.path.exists(TmpDir):
+ RemoveDirectory(TmpDir, True)
+
+ os.mkdir(TmpDir)
+ self.Unpack(TmpDir)
+ for SinF in Files:
+ SinF = os.path.normpath(SinF)
+ if SinF not in self._Files:
+ Logger.Error("PackagingTool", FILE_NOT_FOUND,
+ ExtraData="%s is not in %s!" % \
+ (SinF, self._FileName))
+ self._Files.pop(SinF)
+ self._ZipFile.close()
+
+ self._ZipFile = zipfile.ZipFile(self._FileName, "w", \
+ zipfile.ZIP_DEFLATED)
+ Cwd = os.getcwd()
+ os.chdir(TmpDir)
+ self.PackFiles(self._Files)
+ os.chdir(Cwd)
+ RemoveDirectory(TmpDir, True)
+
+ ## Pack the files under the Top directory. Paths stored in the zip file start
+ # from BaseDir, so BaseDir should be a parent directory of Top: for example,
+ # Pack(Workspace\Dir1, Workspace) packs the files under Dir1, and the paths in
+ # the zip file start from Workspace.
+ #
+ # @param Top: the top directory
+ # @param BaseDir: the base directory
+ #
+ def Pack(self, Top, BaseDir):
+ if not os.path.isdir(Top):
+ Logger.Error("PackagingTool", FILE_UNKNOWN_ERROR, \
+ "%s is not a directory!" %Top)
+
+ FilesToPack = []
+ Cwd = os.getcwd()
+ os.chdir(BaseDir)
+        #
+        # strip BaseDir and the trailing path separator to get the path of
+        # Top relative to BaseDir (the original expression called .join on
+        # an integer, which is not valid Python)
+        #
+        RelaDir = Top[Top.upper().find(BaseDir.upper()) + len(BaseDir) + 1:]
+
+ for Root, Dirs, Files in os.walk(RelaDir):
+ if 'CVS' in Dirs:
+ Dirs.remove('CVS')
+ if '.svn' in Dirs:
+ Dirs.remove('.svn')
+
+            #
+            # prune hidden directories through a slice assignment so os.walk
+            # does not descend into them; calling Dirs.remove() while
+            # iterating over Dirs would skip entries
+            #
+            Dirs[:] = [Dir for Dir in Dirs if not Dir.startswith('.')]
+ for File1 in Files:
+ if File1.startswith('.'):
+ continue
+ ExtName = os.path.splitext(File1)[1]
+ #
+ # skip '.dec', '.inf', '.dsc', '.fdf' files
+ #
+ if ExtName.lower() in ['.dec', '.inf', '.dsc', '.fdf']:
+ continue
+ FilesToPack.append(os.path.join(Root, File1))
+ self.PackFiles(FilesToPack)
+ os.chdir(Cwd)
+
+    ## Pack the given files
+    #
+    # @param Files: the files to pack
+    #
+ def PackFiles(self, Files):
+ for File in Files:
+ Cwd = os.getcwd()
+ os.chdir(mws.getWs(mws.WORKSPACE, File))
+ self.PackFile(File)
+ os.chdir(Cwd)
+
+    ## Pack one file
+    #
+    # @param File: the file to pack
+    # @param ArcName: the archive name to store the file under
+    #
+ def PackFile(self, File, ArcName=None):
+ try:
+ #
+            # avoid packing the same file multiple times
+ #
+ if platform.system() != 'Windows':
+ File = File.replace('\\', '/')
+ ZipedFilesNameList = self._ZipFile.namelist()
+ for ZipedFile in ZipedFilesNameList:
+ if File == os.path.normpath(ZipedFile):
+ return
+ Logger.Info("packing ..." + File)
+ self._ZipFile.write(File, ArcName)
+ except BaseException as Xstr:
+ Logger.Error("PackagingTool", FILE_COMPRESS_FAILURE,
+ ExtraData="%s (%s)" % (File, str(Xstr)))
+
+    ## Write data into the package under the given archive name
+    #
+    # @param Data: data to write
+    # @param ArcName: the archive name to store the data under
+    #
+ def PackData(self, Data, ArcName):
+ try:
+ if os.path.splitext(ArcName)[1].lower() == '.pkg':
+ Data = Data.encode('utf_8')
+ self._ZipFile.writestr(ArcName, Data)
+ except BaseException as Xstr:
+ Logger.Error("PackagingTool", FILE_COMPRESS_FAILURE,
+ ExtraData="%s (%s)" % (ArcName, str(Xstr)))
+
+    ## Close the package file
+    #
+ def Close(self):
+ self._ZipFile.close()
+
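+# Illustrative usage sketch only (not part of the tool); file and member
+# names below are hypothetical:
+#
+#   Pkg = PackageFile("Dist.zip", "w")        # create a new archive
+#   Pkg.PackData("payload", "docs/note.txt")  # store a string under an archive name
+#   Pkg.Close()
+#   Pkg = PackageFile("Dist.zip")             # reopen read-only
+#   Pkg.Unpack("ExtractedDir")                # extract everything
+#   Pkg.Close()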
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/__init__.py
new file mode 100644
index 00000000..1b9f9b32
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Core/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Core' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Core init file
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
new file mode 100755
index 00000000..6d79278c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py
@@ -0,0 +1,683 @@
+## @file GenDecFile.py
+#
+# This file contains the logic to transfer a package object to DEC files.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+GenDEC
+'''
+import os
+import stat
+import codecs
+from hashlib import md5
+from Core.FileHook import __FileHookOpen__
+from Library.Parsing import GenSection
+from Library.CommentGenerating import GenHeaderCommentSection
+from Library.CommentGenerating import GenGenericCommentF
+from Library.CommentGenerating import GenDecTailComment
+from Library.CommentGenerating import _GetHelpStr
+from Library.Misc import GuidStringToGuidStructureString
+from Library.Misc import SaveFileOnChange
+from Library.Misc import ConvertPath
+from Library.Misc import GetLocalValue
+from Library.DataType import TAB_SPACE_SPLIT
+from Library.DataType import TAB_COMMA_SPLIT
+from Library.DataType import END_OF_LINE
+from Library.DataType import TAB_ARCH_COMMON
+from Library.DataType import TAB_VALUE_SPLIT
+from Library.DataType import TAB_COMMENT_SPLIT
+from Library.DataType import TAB_PCD_VALIDRANGE
+from Library.DataType import TAB_PCD_VALIDLIST
+from Library.DataType import TAB_PCD_EXPRESSION
+from Library.DataType import TAB_DEC_DEFINES_DEC_SPECIFICATION
+from Library.DataType import TAB_DEC_DEFINES_PACKAGE_NAME
+from Library.DataType import TAB_DEC_DEFINES_PACKAGE_GUID
+from Library.DataType import TAB_DEC_DEFINES_PACKAGE_VERSION
+from Library.DataType import TAB_DEC_DEFINES_PKG_UNI_FILE
+from Library.DataType import TAB_DEC_PACKAGE_ABSTRACT
+from Library.DataType import TAB_DEC_PACKAGE_DESCRIPTION
+from Library.DataType import TAB_DEC_BINARY_ABSTRACT
+from Library.DataType import TAB_DEC_BINARY_DESCRIPTION
+from Library.DataType import TAB_LANGUAGE_EN_X
+from Library.DataType import TAB_BINARY_HEADER_USERID
+from Library.DataType import TAB_BINARY_HEADER_IDENTIFIER
+from Library.DataType import TAB_COMMENT_EDK1_SPLIT
+from Library.DataType import TAB_ENCODING_UTF16LE
+from Library.DataType import TAB_CAPHEX_START
+from Library.DataType import TAB_HEX_START
+from Library.DataType import TAB_UNDERLINE_SPLIT
+from Library.DataType import TAB_STR_TOKENERR
+from Library.DataType import TAB_STR_TOKENCNAME
+from Library.DataType import TAB_PCD_ERROR_SECTION_COMMENT
+from Library.DataType import TAB_PCD_ERROR
+from Library.DataType import TAB_SECTION_START
+from Library.DataType import TAB_SECTION_END
+from Library.DataType import TAB_SPLIT
+import Library.DataType as DT
+from Library.UniClassObject import FormatUniEntry
+from Library.StringUtils import GetUniFileName
+
+def GenPcd(Package, Content):
+ #
+ # generate [Pcd] section
+ # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+ #
+ ValidUsageDict = {}
+ for Pcd in Package.GetPcdList():
+ #
+ # Generate generic comment
+ #
+ HelpTextList = Pcd.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr, 2)
+
+ PromptList = Pcd.GetPromptList()
+ PromptStr = _GetHelpStr(PromptList)
+ CommentStr += GenGenericCommentF(PromptStr.strip(), 1, True)
+
+ PcdErrList = Pcd.GetPcdErrorsList()
+ for PcdErr in PcdErrList:
+ CommentStr += GenPcdErrComment(PcdErr)
+ Statement = CommentStr
+
+ CName = Pcd.GetCName()
+ TokenSpaceGuidCName = Pcd.GetTokenSpaceGuidCName()
+ DefaultValue = Pcd.GetDefaultValue()
+ DatumType = Pcd.GetDatumType()
+ Token = Pcd.GetToken()
+ ValidUsage = Pcd.GetValidUsage()
+
+ if ValidUsage == 'FeaturePcd':
+ ValidUsage = 'PcdsFeatureFlag'
+ elif ValidUsage == 'PatchPcd':
+ ValidUsage = 'PcdsPatchableInModule'
+ elif ValidUsage == 'FixedPcd':
+ ValidUsage = 'PcdsFixedAtBuild'
+ elif ValidUsage == 'Pcd':
+ ValidUsage = 'PcdsDynamic'
+ elif ValidUsage == 'PcdEx':
+ ValidUsage = 'PcdsDynamicEx'
+
+ if ValidUsage in ValidUsageDict:
+ NewSectionDict = ValidUsageDict[ValidUsage]
+ else:
+ NewSectionDict = {}
+ ValidUsageDict[ValidUsage] = NewSectionDict
+ Statement += TokenSpaceGuidCName + '.' + CName
+ Statement += '|' + DefaultValue
+ Statement += '|' + DatumType
+ Statement += '|' + Token
+ #
+ # generate tail comment
+ #
+ if Pcd.GetSupModuleList():
+ Statement += GenDecTailComment(Pcd.GetSupModuleList())
+
+ ArchList = sorted(Pcd.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+
+ for ValidUsage in ValidUsageDict:
+ Content += GenSection(ValidUsage, ValidUsageDict[ValidUsage], True, True)
+
+ return Content
+
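+# For reference, each statement built above follows the DEC format
+# <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>; a hypothetical
+# generated entry could look like:
+#
+#   [PcdsFixedAtBuild]
+#     gExampleTokenSpaceGuid.PcdExampleValue|0x0|UINT32|0x00000001
+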
+def GenPcdErrorMsgSection(Package, Content):
+ if not Package.PcdErrorCommentDict:
+ return Content
+
+ #
+ # Generate '# [Error.<TokenSpcCName>]' section
+ #
+ Content += END_OF_LINE + END_OF_LINE
+ SectionComment = TAB_COMMENT_SPLIT + END_OF_LINE
+ SectionComment += TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_ERROR_SECTION_COMMENT + END_OF_LINE
+ SectionComment += TAB_COMMENT_SPLIT + END_OF_LINE
+ TokenSpcCNameList = []
+
+ #
+ # Get TokenSpcCName list in PcdErrorCommentDict in Package object
+ #
+ for (TokenSpcCName, ErrorNumber) in Package.PcdErrorCommentDict:
+ if TokenSpcCName not in TokenSpcCNameList:
+ TokenSpcCNameList.append(TokenSpcCName)
+
+ for TokenSpcCNameItem in TokenSpcCNameList:
+ SectionName = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_SECTION_START + TAB_PCD_ERROR + \
+ TAB_SPLIT + TokenSpcCNameItem + TAB_SECTION_END + END_OF_LINE
+ Content += SectionComment
+ Content += SectionName
+ for (TokenSpcCName, ErrorNumber) in Package.PcdErrorCommentDict:
+ if TokenSpcCNameItem == TokenSpcCName:
+ PcdErrorMsg = GetLocalValue(Package.PcdErrorCommentDict[(TokenSpcCName, ErrorNumber)])
+ SectionItem = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_SPACE_SPLIT + \
+ ErrorNumber + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT + \
+ PcdErrorMsg + END_OF_LINE
+ Content += SectionItem
+
+ Content += TAB_COMMENT_SPLIT
+ return Content
+
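+# For reference, the block generated above is emitted entirely as comment
+# lines in the DEC file; a hypothetical result:
+#
+#   # [Error.gExampleTokenSpaceGuid]
+#   #   0x80000001 | The value must be below 10.
+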
+def GenGuidProtocolPpi(Package, Content):
+ #
+ # generate [Guids] section
+ #
+ NewSectionDict = {}
+
+ LeftOffset = 46
+    # Determine the column width for left-justifying the CNames:
+    # use the longest CName, but never less than the 46-character minimum
+ for Guid in Package.GetGuidList():
+ if len(Guid.GetCName()) > LeftOffset:
+ LeftOffset = len(Guid.GetCName())
+
+ # Generate
+ for Guid in Package.GetGuidList():
+ #
+ # Generate generic comment
+ #
+ HelpTextList = Guid.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr, 2)
+
+ Statement = CommentStr
+ CName = Guid.GetCName()
+ Value = GuidStringToGuidStructureString(Guid.GetGuid())
+ Statement += CName.ljust(LeftOffset) + ' = ' + Value
+ #
+ # generate tail comment
+ #
+ if Guid.GetSupModuleList():
+ Statement += GenDecTailComment(Guid.GetSupModuleList())
+ ArchList = sorted(Guid.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+
+ Content += GenSection('Guids', NewSectionDict, True, True)
+
+ #
+ # generate [Protocols] section
+ #
+ NewSectionDict = {}
+ LeftOffset = 46
+    # Determine the column width for left-justifying the CNames:
+    # use the longest CName, but never less than the 46-character minimum
+ for Protocol in Package.GetProtocolList():
+ if len(Protocol.GetCName()) > LeftOffset:
+ LeftOffset = len(Protocol.GetCName())
+
+ for Protocol in Package.GetProtocolList():
+ #
+ # Generate generic comment
+ #
+ HelpTextList = Protocol.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr, 2)
+
+ Statement = CommentStr
+ CName = Protocol.GetCName()
+ Value = GuidStringToGuidStructureString(Protocol.GetGuid())
+ Statement += CName.ljust(LeftOffset) + ' = ' + Value
+
+ #
+ # generate tail comment
+ #
+ if Protocol.GetSupModuleList():
+ Statement += GenDecTailComment(Protocol.GetSupModuleList())
+ ArchList = sorted(Protocol.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+
+ Content += GenSection('Protocols', NewSectionDict, True, True)
+
+ #
+ # generate [Ppis] section
+ #
+ NewSectionDict = {}
+ LeftOffset = 46
+    # Determine the column width for left-justifying the CNames:
+    # use the longest CName, but never less than the 46-character minimum
+ for Ppi in Package.GetPpiList():
+ if len(Ppi.GetCName()) > LeftOffset:
+ LeftOffset = len(Ppi.GetCName())
+
+ for Ppi in Package.GetPpiList():
+ #
+ # Generate generic comment
+ #
+ HelpTextList = Ppi.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr, 2)
+
+ Statement = CommentStr
+ CName = Ppi.GetCName()
+ Value = GuidStringToGuidStructureString(Ppi.GetGuid())
+ Statement += CName.ljust(LeftOffset) + ' = ' + Value
+
+ #
+ # generate tail comment
+ #
+ if Ppi.GetSupModuleList():
+ Statement += GenDecTailComment(Ppi.GetSupModuleList())
+ ArchList = sorted(Ppi.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+
+ Content += GenSection('Ppis', NewSectionDict, True, True)
+
+ return Content
+
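+# For reference, each entry generated above left-justifies the CName and
+# renders the GUID as a C structure initializer; a hypothetical [Guids] entry:
+#
+#   [Guids]
+#     gExampleGuid                                   = { 0x12345678, 0x9abc, 0xdef0, { 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8 }}
+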
+## Transfer Package Object to Dec files
+#
+# Transfer all contents of a standard Package Object to a Dec file
+#
+# @param Package: A Package
+#
+def PackageToDec(Package, DistHeader = None):
+ #
+ # Init global information for the file
+ #
+ ContainerFile = Package.GetFullPath()
+
+ Content = ''
+
+ #
+ # Generate file header
+ #
+ PackageAbstract = GetLocalValue(Package.GetAbstract())
+ PackageDescription = GetLocalValue(Package.GetDescription())
+ PackageCopyright = ''
+ PackageLicense = ''
+ for (Lang, Copyright) in Package.GetCopyright():
+ if Lang:
+ pass
+ PackageCopyright = Copyright
+ for (Lang, License) in Package.GetLicense():
+ if Lang:
+ pass
+ PackageLicense = License
+ if not PackageAbstract and DistHeader:
+ PackageAbstract = GetLocalValue(DistHeader.GetAbstract())
+ if not PackageDescription and DistHeader:
+ PackageDescription = GetLocalValue(DistHeader.GetDescription())
+ if not PackageCopyright and DistHeader:
+ for (Lang, Copyright) in DistHeader.GetCopyright():
+ PackageCopyright = Copyright
+ if not PackageLicense and DistHeader:
+ for (Lang, License) in DistHeader.GetLicense():
+ PackageLicense = License
+
+ #
+ # Generate header comment section of DEC file
+ #
+ Content += GenHeaderCommentSection(PackageAbstract, \
+ PackageDescription, \
+ PackageCopyright, \
+ PackageLicense).replace('\r\n', '\n')
+
+ #
+ # Generate Binary header
+ #
+ for UserExtension in Package.GetUserExtensionList():
+ if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
+ and UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
+ PackageBinaryAbstract = GetLocalValue(UserExtension.GetBinaryAbstract())
+ PackageBinaryDescription = GetLocalValue(UserExtension.GetBinaryDescription())
+ PackageBinaryCopyright = ''
+ PackageBinaryLicense = ''
+ for (Lang, Copyright) in UserExtension.GetBinaryCopyright():
+ PackageBinaryCopyright = Copyright
+ for (Lang, License) in UserExtension.GetBinaryLicense():
+ PackageBinaryLicense = License
+ if PackageBinaryAbstract and PackageBinaryDescription and \
+ PackageBinaryCopyright and PackageBinaryLicense:
+ Content += GenHeaderCommentSection(PackageBinaryAbstract,
+ PackageBinaryDescription,
+ PackageBinaryCopyright,
+ PackageBinaryLicense,
+ True)
+
+ #
+ # Generate PACKAGE_UNI_FILE for the Package
+ #
+ FileHeader = GenHeaderCommentSection(PackageAbstract, PackageDescription, PackageCopyright, PackageLicense, False, \
+ TAB_COMMENT_EDK1_SPLIT)
+ GenPackageUNIEncodeFile(Package, FileHeader)
+
+ #
+    # for each section, maintain a dict: the sorted arch string is the key
+    # and the statement list is the data
+ # { 'Arch1 Arch2 Arch3': [statement1, statement2],
+ # 'Arch1' : [statement1, statement3]
+ # }
+ #
+
+ #
+ # generate [Defines] section
+ #
+ LeftOffset = 31
+ NewSectionDict = {TAB_ARCH_COMMON : []}
+ SpecialItemList = []
+
+ Statement = (u'%s ' % TAB_DEC_DEFINES_DEC_SPECIFICATION).ljust(LeftOffset) + u'= %s' % '0x00010017'
+ SpecialItemList.append(Statement)
+
+ BaseName = Package.GetBaseName()
+ if BaseName.startswith('.') or BaseName.startswith('-'):
+ BaseName = '_' + BaseName
+ Statement = (u'%s ' % TAB_DEC_DEFINES_PACKAGE_NAME).ljust(LeftOffset) + u'= %s' % BaseName
+ SpecialItemList.append(Statement)
+
+ Statement = (u'%s ' % TAB_DEC_DEFINES_PACKAGE_VERSION).ljust(LeftOffset) + u'= %s' % Package.GetVersion()
+ SpecialItemList.append(Statement)
+
+ Statement = (u'%s ' % TAB_DEC_DEFINES_PACKAGE_GUID).ljust(LeftOffset) + u'= %s' % Package.GetGuid()
+ SpecialItemList.append(Statement)
+
+ if Package.UNIFlag:
+ Statement = (u'%s ' % TAB_DEC_DEFINES_PKG_UNI_FILE).ljust(LeftOffset) + u'= %s' % Package.GetBaseName() + '.uni'
+ SpecialItemList.append(Statement)
+
+ for SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + SpecialItemList
+ Content += GenSection('Defines', NewSectionDict)
+
+ #
+ # generate [Includes] section
+ #
+ NewSectionDict = {}
+ IncludeArchList = Package.GetIncludeArchList()
+ if IncludeArchList:
+ for Path, ArchList in IncludeArchList:
+ Statement = Path
+ ArchList.sort()
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + [ConvertPath(Statement)]
+ else:
+ NewSectionDict[SortedArch] = [ConvertPath(Statement)]
+
+ Content += GenSection('Includes', NewSectionDict)
+
+ #
+ # generate [guids][protocols][ppis] sections
+ #
+ Content = GenGuidProtocolPpi(Package, Content)
+
+ #
+ # generate [LibraryClasses] section
+ #
+ NewSectionDict = {}
+ for LibraryClass in Package.GetLibraryClassList():
+ #
+ # Generate generic comment
+ #
+ HelpTextList = LibraryClass.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ if HelpStr:
+ HelpStr = '@libraryclass' + HelpStr
+ CommentStr = GenGenericCommentF(HelpStr, 2, False, True)
+
+ Statement = CommentStr
+ Name = LibraryClass.GetLibraryClass()
+ IncludeHeader = LibraryClass.GetIncludeHeader()
+ Statement += Name + '|' + ConvertPath(IncludeHeader)
+ #
+ # generate tail comment
+ #
+ if LibraryClass.GetSupModuleList():
+ Statement += \
+ GenDecTailComment(LibraryClass.GetSupModuleList())
+ ArchList = sorted(LibraryClass.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = \
+ NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+
+ Content += GenSection('LibraryClasses', NewSectionDict, True, True)
+
+ #
+ # Generate '# [Error.<TokenSpcCName>]' section
+ #
+ Content = GenPcdErrorMsgSection(Package, Content)
+
+ Content = GenPcd(Package, Content)
+
+ #
+ # generate [UserExtensions] section
+ #
+ NewSectionDict = {}
+ for UserExtension in Package.GetUserExtensionList():
+ if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID and \
+ UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
+ continue
+
+ # Generate Private Section first
+ if UserExtension.GetUserID() == DT.TAB_INTEL and UserExtension.GetIdentifier() == DT.TAB_PRIVATE:
+ Content += '\n' + UserExtension.GetStatement()
+ continue
+
+ Statement = UserExtension.GetStatement()
+ if not Statement:
+ continue
+ else:
+ LineList = Statement.split('\n')
+ NewStatement = ""
+ for Line in LineList:
+ NewStatement += " %s\n" % Line
+
+ SectionList = []
+ SectionName = 'UserExtensions'
+ UserId = UserExtension.GetUserID()
+ if UserId:
+ if '.' in UserId:
+ UserId = '"' + UserId + '"'
+ SectionName += '.' + UserId
+ if UserExtension.GetIdentifier():
+ SectionName += '.' + '"' + UserExtension.GetIdentifier() + '"'
+ if not UserExtension.GetSupArchList():
+ SectionList.append(SectionName)
+ else:
+ for Arch in UserExtension.GetSupArchList():
+ SectionList.append(SectionName + '.' + Arch)
+ SectionName = ', '.join(SectionList)
+ SectionName = ''.join(['[', SectionName, ']\n'])
+ Content += '\n' + SectionName + NewStatement
+
+ SaveFileOnChange(ContainerFile, Content, False)
+    if DistHeader and DistHeader.ReadOnly:
+ os.chmod(ContainerFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
+ else:
+ os.chmod(ContainerFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
+ return ContainerFile
+
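+# Illustrative call sketch (the objects are assumed to come from the UPT
+# install flow; names here are hypothetical):
+#
+#   DecFilePath = PackageToDec(PackageObject, DistPkg.Header)
+#   # the regenerated .dec file is saved and chmod'ed per DistPkg.Header.ReadOnly
+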
+## GenPackageUNIEncodeFile
+# Generate the package UNI file; by default it is written as a UCS-2LE (UTF-16LE) encoded file
+#
+def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCODING_UTF16LE):
+ GenUNIFlag = False
+ OnlyLANGUAGE_EN_X = True
+ BinaryAbstract = []
+ BinaryDescription = []
+ #
+ # If more than one language code is used for any element that would be present in the PACKAGE_UNI_FILE,
+ # then the PACKAGE_UNI_FILE must be created.
+ #
+ for (Key, Value) in PackageObject.GetAbstract() + PackageObject.GetDescription():
+ if Key == TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+
+ for UserExtension in PackageObject.GetUserExtensionList():
+ if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
+ and UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
+ for (Key, Value) in UserExtension.GetBinaryAbstract():
+ if Key == TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+ BinaryAbstract.append((Key, Value))
+
+ for (Key, Value) in UserExtension.GetBinaryDescription():
+ if Key == TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+ BinaryDescription.append((Key, Value))
+
+ for Pcd in PackageObject.GetPcdList():
+ for TxtObj in Pcd.GetPromptList() + Pcd.GetHelpTextList():
+ if TxtObj.GetLang() == TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+
+ for PcdError in Pcd.GetPcdErrorsList():
+ if PcdError.GetErrorNumber().startswith('0x') or PcdError.GetErrorNumber().startswith('0X'):
+ for (Key, Value) in PcdError.GetErrorMessageList():
+ if Key == TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+ if not GenUNIFlag:
+ return
+ elif OnlyLANGUAGE_EN_X:
+ return
+ else:
+ PackageObject.UNIFlag = True
+
+ if not os.path.exists(os.path.dirname(PackageObject.GetFullPath())):
+ os.makedirs(os.path.dirname(PackageObject.GetFullPath()))
+
+ ContainerFile = GetUniFileName(os.path.dirname(PackageObject.GetFullPath()), PackageObject.GetBaseName())
+
+ Content = UniFileHeader + '\r\n'
+ Content += '\r\n'
+
+ Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_ABSTRACT, PackageObject.GetAbstract(), ContainerFile) + '\r\n'
+
+ Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_DESCRIPTION, PackageObject.GetDescription(), ContainerFile) \
+ + '\r\n'
+
+ Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) + '\r\n'
+
+ Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_DESCRIPTION, BinaryDescription, ContainerFile) + '\r\n'
+
+ PromptGenList = []
+ HelpTextGenList = []
+ for Pcd in PackageObject.GetPcdList():
+ # Generate Prompt for each Pcd
+ PcdPromptStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_PROMPT '
+ TokenValueList = []
+ for TxtObj in Pcd.GetPromptList():
+ Lang = TxtObj.GetLang()
+ PromptStr = TxtObj.GetString()
+ #
+ # Avoid generating the same PROMPT entry more than one time.
+ #
+ if (PcdPromptStrName, Lang) not in PromptGenList:
+ TokenValueList.append((Lang, PromptStr))
+ PromptGenList.append((PcdPromptStrName, Lang))
+ PromptString = FormatUniEntry(PcdPromptStrName, TokenValueList, ContainerFile) + '\r\n'
+ if PromptString not in Content:
+ Content += PromptString
+
+ # Generate Help String for each Pcd
+ PcdHelpStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_HELP '
+ TokenValueList = []
+ for TxtObj in Pcd.GetHelpTextList():
+ Lang = TxtObj.GetLang()
+ HelpStr = TxtObj.GetString()
+ #
+ # Avoid generating the same HELP entry more than one time.
+ #
+ if (PcdHelpStrName, Lang) not in HelpTextGenList:
+ TokenValueList.append((Lang, HelpStr))
+ HelpTextGenList.append((PcdHelpStrName, Lang))
+ HelpTextString = FormatUniEntry(PcdHelpStrName, TokenValueList, ContainerFile) + '\r\n'
+ if HelpTextString not in Content:
+ Content += HelpTextString
+
+ # Generate PcdError for each Pcd if ErrorNo exist.
+ for PcdError in Pcd.GetPcdErrorsList():
+ ErrorNo = PcdError.GetErrorNumber()
+ if ErrorNo.startswith(TAB_HEX_START) or ErrorNo.startswith(TAB_CAPHEX_START):
+ PcdErrStrName = '#string ' + TAB_STR_TOKENCNAME + TAB_UNDERLINE_SPLIT + Pcd.GetTokenSpaceGuidCName() \
+ + TAB_UNDERLINE_SPLIT + TAB_STR_TOKENERR \
+ + TAB_UNDERLINE_SPLIT + ErrorNo[2:]
+ PcdErrString = FormatUniEntry(PcdErrStrName, PcdError.GetErrorMessageList(), ContainerFile) + '\r\n'
+ if PcdErrString not in Content:
+ Content += PcdErrString
+
+ File = codecs.open(ContainerFile, 'w', Encoding)
+ File.write(u'\uFEFF' + Content)
+ File.stream.close()
+ Md5Signature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
+ if (ContainerFile, Md5Sum) not in PackageObject.FileList:
+ PackageObject.FileList.append((ContainerFile, Md5Sum))
+
+ return ContainerFile
+
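+# For reference, entries written to the UNI file above use the
+# '#string <NAME> #language <lang> "<text>"' format; a hypothetical entry:
+#
+#   #string STR_PACKAGE_ABSTRACT  #language en-US  "Example package abstract"
+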
+## GenPcdErrComment
+#
+# @param PcdErrObject: PcdErrorObject
+#
+# @retval CommentStr: Generated comment lines, with prefix "#"
+#
+def GenPcdErrComment(PcdErrObject):
+ CommentStr = ''
+ ErrorCode = PcdErrObject.GetErrorNumber()
+ ValidValueRange = PcdErrObject.GetValidValueRange()
+ if ValidValueRange:
+ CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_VALIDRANGE + TAB_SPACE_SPLIT
+ if ErrorCode:
+ CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
+ CommentStr += ValidValueRange + END_OF_LINE
+
+ ValidValue = PcdErrObject.GetValidValue()
+ if ValidValue:
+ ValidValueList = \
+ [Value for Value in ValidValue.split(TAB_SPACE_SPLIT) if Value]
+ CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_VALIDLIST + TAB_SPACE_SPLIT
+ if ErrorCode:
+ CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
+ CommentStr += TAB_COMMA_SPLIT.join(ValidValueList) + END_OF_LINE
+
+ Expression = PcdErrObject.GetExpression()
+ if Expression:
+ CommentStr = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_EXPRESSION + TAB_SPACE_SPLIT
+ if ErrorCode:
+ CommentStr += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
+ CommentStr += Expression + END_OF_LINE
+
+ return CommentStr
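+
+# For reference, the three comment forms produced above (error codes and
+# values are hypothetical):
+#
+#   # @ValidRange  0x80000001 | 0 - 10
+#   # @ValidList   0x80000002 | 1, 2, 4
+#   # @Expression  0x80000003 | Value > 0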
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
new file mode 100755
index 00000000..83876fe8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
@@ -0,0 +1,1122 @@
+## @file GenInfFile.py
+#
+# This file contains the logic to transfer a module object to INF files.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+GenInf
+'''
+import os
+import stat
+import codecs
+from hashlib import md5
+from Core.FileHook import __FileHookOpen__
+from Library.StringUtils import GetSplitValueList
+from Library.Parsing import GenSection
+from Library.Parsing import GetWorkspacePackage
+from Library.Parsing import ConvertArchForInstall
+from Library.Misc import SaveFileOnChange
+from Library.Misc import IsAllModuleList
+from Library.Misc import Sdict
+from Library.Misc import ConvertPath
+from Library.Misc import ConvertSpec
+from Library.Misc import GetRelativePath
+from Library.Misc import GetLocalValue
+from Library.CommentGenerating import GenHeaderCommentSection
+from Library.CommentGenerating import GenGenericCommentF
+from Library.CommentGenerating import _GetHelpStr
+from Library import GlobalData
+from Logger import StringTable as ST
+from Logger import ToolError
+import Logger.Log as Logger
+from Library import DataType as DT
+from GenMetaFile import GenMetaFileMisc
+from Library.UniClassObject import FormatUniEntry
+from Library.StringUtils import GetUniFileName
+
+
+## Transfer Module Object to Inf files
+#
+# Transfer all contents of a standard Module Object to an Inf file
+#
+# @param ModuleObject: A Module Object
+# @param PackageObject: the Package Object the module belongs to, if any
+# @param DistHeader: the DistributionPackage header, if any
+#
+def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
+ if not GlobalData.gWSPKG_LIST:
+ GlobalData.gWSPKG_LIST = GetWorkspacePackage()
+ #
+ # Init global information for the file
+ #
+ ContainerFile = ModuleObject.GetFullPath()
+
+ Content = ''
+ #
+    # Generate the file header. If any Abstract, Description, Copyright or License XML elements are missing,
+    # 1) use the Abstract, Description, Copyright or License from the PackageSurfaceArea.Header elements
+    # that the module belongs to, or 2) if this is a stand-alone module that is not included in a PackageSurfaceArea,
+    # use the abstract, description, copyright or license from the DistributionPackage.Header elements.
+ #
+ ModuleAbstract = GetLocalValue(ModuleObject.GetAbstract())
+ if not ModuleAbstract and PackageObject:
+ ModuleAbstract = GetLocalValue(PackageObject.GetAbstract())
+ if not ModuleAbstract and DistHeader:
+ ModuleAbstract = GetLocalValue(DistHeader.GetAbstract())
+ ModuleDescription = GetLocalValue(ModuleObject.GetDescription())
+ if not ModuleDescription and PackageObject:
+ ModuleDescription = GetLocalValue(PackageObject.GetDescription())
+ if not ModuleDescription and DistHeader:
+ ModuleDescription = GetLocalValue(DistHeader.GetDescription())
+ ModuleCopyright = ''
+ for (Lang, Copyright) in ModuleObject.GetCopyright():
+ if Lang:
+ pass
+ ModuleCopyright = Copyright
+ if not ModuleCopyright and PackageObject:
+ for (Lang, Copyright) in PackageObject.GetCopyright():
+ if Lang:
+ pass
+ ModuleCopyright = Copyright
+ if not ModuleCopyright and DistHeader:
+ for (Lang, Copyright) in DistHeader.GetCopyright():
+ if Lang:
+ pass
+ ModuleCopyright = Copyright
+ ModuleLicense = ''
+ for (Lang, License) in ModuleObject.GetLicense():
+ if Lang:
+ pass
+ ModuleLicense = License
+ if not ModuleLicense and PackageObject:
+ for (Lang, License) in PackageObject.GetLicense():
+ if Lang:
+ pass
+ ModuleLicense = License
+ if not ModuleLicense and DistHeader:
+ for (Lang, License) in DistHeader.GetLicense():
+ if Lang:
+ pass
+ ModuleLicense = License
+
+ #
+ # Generate header comment section of INF file
+ #
+ Content += GenHeaderCommentSection(ModuleAbstract,
+ ModuleDescription,
+ ModuleCopyright,
+ ModuleLicense).replace('\r\n', '\n')
+
+ #
+ # Generate Binary Header
+ #
+ for UserExtension in ModuleObject.GetUserExtensionList():
+ if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \
+ and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
+ ModuleBinaryAbstract = GetLocalValue(UserExtension.GetBinaryAbstract())
+ ModuleBinaryDescription = GetLocalValue(UserExtension.GetBinaryDescription())
+ ModuleBinaryCopyright = ''
+ ModuleBinaryLicense = ''
+ for (Lang, Copyright) in UserExtension.GetBinaryCopyright():
+ ModuleBinaryCopyright = Copyright
+ for (Lang, License) in UserExtension.GetBinaryLicense():
+ ModuleBinaryLicense = License
+ if ModuleBinaryAbstract and ModuleBinaryDescription and \
+ ModuleBinaryCopyright and ModuleBinaryLicense:
+ Content += GenHeaderCommentSection(ModuleBinaryAbstract,
+ ModuleBinaryDescription,
+ ModuleBinaryCopyright,
+ ModuleBinaryLicense,
+ True)
+
+ #
+ # Generate MODULE_UNI_FILE for module
+ #
+ FileHeader = GenHeaderCommentSection(ModuleAbstract, ModuleDescription, ModuleCopyright, ModuleLicense, False, \
+ DT.TAB_COMMENT_EDK1_SPLIT)
+ ModuleUniFile = GenModuleUNIEncodeFile(ModuleObject, FileHeader)
+ if ModuleUniFile:
+ ModuleObject.SetModuleUniFile(os.path.basename(ModuleUniFile))
+
+ #
+ # Judge whether the INF file is an AsBuild INF.
+ #
+ if ModuleObject.BinaryModule:
+ GlobalData.gIS_BINARY_INF = True
+ else:
+ GlobalData.gIS_BINARY_INF = False
+ #
+    # for each section, maintain a dict: the sorted arch string is the key
+    # and the statement list is the data
+ # { 'Arch1 Arch2 Arch3': [statement1, statement2],
+ # 'Arch1' : [statement1, statement3]
+ # }
+ #
+ # Gen section contents
+ #
+ Content += GenDefines(ModuleObject)
+ Content += GenBuildOptions(ModuleObject)
+ Content += GenLibraryClasses(ModuleObject)
+ Content += GenPackages(ModuleObject)
+ Content += GenPcdSections(ModuleObject)
+ Content += GenSources(ModuleObject)
+ Content += GenProtocolPPiSections(ModuleObject.GetProtocolList(), True)
+ Content += GenProtocolPPiSections(ModuleObject.GetPpiList(), False)
+ Content += GenGuidSections(ModuleObject.GetGuidList())
+ Content += GenBinaries(ModuleObject)
+ Content += GenDepex(ModuleObject)
+ __UserExtensionsContent = GenUserExtensions(ModuleObject)
+ Content += __UserExtensionsContent
+ if ModuleObject.GetEventList() or ModuleObject.GetBootModeList() or ModuleObject.GetHobList():
+ Content += '\n'
+ #
+ # generate [Event], [BootMode], [Hob] section
+ #
+ Content += GenSpecialSections(ModuleObject.GetEventList(), 'Event', __UserExtensionsContent)
+ Content += GenSpecialSections(ModuleObject.GetBootModeList(), 'BootMode', __UserExtensionsContent)
+ Content += GenSpecialSections(ModuleObject.GetHobList(), 'Hob', __UserExtensionsContent)
+ SaveFileOnChange(ContainerFile, Content, False)
+    if DistHeader and DistHeader.ReadOnly:
+ os.chmod(ContainerFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
+ else:
+ os.chmod(ContainerFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
+ return ContainerFile
+
+## GenModuleUNIEncodeFile
+# Generate the module UNI file; by default it is written as a UCS-2LE (UTF-16LE) encoded file
+#
+def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCODING_UTF16LE):
+ GenUNIFlag = False
+ OnlyLANGUAGE_EN_X = True
+ BinaryAbstract = []
+ BinaryDescription = []
+ #
+ # If more than one language code is used for any element that would be present in the MODULE_UNI_FILE,
+ # then the MODULE_UNI_FILE must be created.
+ #
+ for (Key, Value) in ModuleObject.GetAbstract() + ModuleObject.GetDescription():
+ if Key == DT.TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+
+ for UserExtension in ModuleObject.GetUserExtensionList():
+ if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \
+ and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
+ for (Key, Value) in UserExtension.GetBinaryAbstract():
+ if Key == DT.TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+ BinaryAbstract.append((Key, Value))
+ for (Key, Value) in UserExtension.GetBinaryDescription():
+ if Key == DT.TAB_LANGUAGE_EN_X:
+ GenUNIFlag = True
+ else:
+ OnlyLANGUAGE_EN_X = False
+ BinaryDescription.append((Key, Value))
+
+
+ if not GenUNIFlag:
+ return
+ elif OnlyLANGUAGE_EN_X:
+ return
+ else:
+ ModuleObject.UNIFlag = True
+ ContainerFile = GetUniFileName(os.path.dirname(ModuleObject.GetFullPath()), ModuleObject.GetBaseName())
+
+ if not os.path.exists(os.path.dirname(ModuleObject.GetFullPath())):
+ os.makedirs(os.path.dirname(ModuleObject.GetFullPath()))
+
+ Content = UniFileHeader + '\r\n'
+ Content += '\r\n'
+
+ Content += FormatUniEntry('#string ' + DT.TAB_INF_ABSTRACT, ModuleObject.GetAbstract(), ContainerFile) + '\r\n'
+
+ Content += FormatUniEntry('#string ' + DT.TAB_INF_DESCRIPTION, ModuleObject.GetDescription(), ContainerFile) \
+ + '\r\n'
+
+ BinaryAbstractString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_ABSTRACT, BinaryAbstract, ContainerFile)
+ if BinaryAbstractString:
+ Content += BinaryAbstractString + '\r\n'
+
+ BinaryDescriptionString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_DESCRIPTION, BinaryDescription, \
+ ContainerFile)
+ if BinaryDescriptionString:
+ Content += BinaryDescriptionString + '\r\n'
+
+ if not os.path.exists(ContainerFile):
+ File = codecs.open(ContainerFile, 'wb', Encoding)
+ File.write(u'\uFEFF' + Content)
+ File.stream.close()
+ Md5Signature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
+ if (ContainerFile, Md5Sum) not in ModuleObject.FileList:
+ ModuleObject.FileList.append((ContainerFile, Md5Sum))
+
+ return ContainerFile
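+
+# Illustrative UNI entry emitted above (values hypothetical):
+#
+#   #string STR_MODULE_ABSTRACT  #language en-US  "Example module abstract"
+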
+def GenDefines(ModuleObject):
+ #
+ # generate [Defines] section
+ #
+ LeftOffset = 31
+ Content = ''
+ NewSectionDict = {}
+
+ for UserExtension in ModuleObject.GetUserExtensionList():
+ DefinesDict = UserExtension.GetDefinesDict()
+ if not DefinesDict:
+ continue
+ for Statement in DefinesDict:
+ if len(Statement.split(DT.TAB_EQUAL_SPLIT)) > 1:
+ Statement = (u'%s ' % Statement.split(DT.TAB_EQUAL_SPLIT, 1)[0]).ljust(LeftOffset) \
+ + u'= %s' % Statement.split(DT.TAB_EQUAL_SPLIT, 1)[1].lstrip()
+ SortedArch = DT.TAB_ARCH_COMMON
+ if Statement.strip().startswith(DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE):
+ pos = Statement.find(DT.TAB_VALUE_SPLIT)
+ if pos == -1:
+ pos = Statement.find(DT.TAB_EQUAL_SPLIT)
+ Makefile = ConvertPath(Statement[pos + 1:].strip())
+ Statement = Statement[:pos + 1] + ' ' + Makefile
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+ SpecialStatementList = []
+
+ # TAB_INF_DEFINES_INF_VERSION
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_INF_VERSION).ljust(LeftOffset) + u'= %s' % '0x00010017'
+ SpecialStatementList.append(Statement)
+
+ # BaseName
+ BaseName = ModuleObject.GetBaseName()
+ if BaseName.startswith('.') or BaseName.startswith('-'):
+ BaseName = '_' + BaseName
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_BASE_NAME).ljust(LeftOffset) + u'= %s' % BaseName
+ SpecialStatementList.append(Statement)
+
+ # TAB_INF_DEFINES_FILE_GUID
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_FILE_GUID).ljust(LeftOffset) + u'= %s' % ModuleObject.GetGuid()
+ SpecialStatementList.append(Statement)
+
+ # TAB_INF_DEFINES_VERSION_STRING
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_VERSION_STRING).ljust(LeftOffset) + u'= %s' % ModuleObject.GetVersion()
+ SpecialStatementList.append(Statement)
+
+    # TAB_INF_DEFINES_MODULE_UNI_FILE
+ if ModuleObject.UNIFlag:
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_MODULE_UNI_FILE).ljust(LeftOffset) + \
+ u'= %s' % ModuleObject.GetModuleUniFile()
+ SpecialStatementList.append(Statement)
+
+ # TAB_INF_DEFINES_MODULE_TYPE
+ if ModuleObject.GetModuleType():
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_MODULE_TYPE).ljust(LeftOffset) + u'= %s' % ModuleObject.GetModuleType()
+ SpecialStatementList.append(Statement)
+
+ # TAB_INF_DEFINES_PCD_IS_DRIVER
+ if ModuleObject.GetPcdIsDriver():
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_PCD_IS_DRIVER).ljust(LeftOffset) + \
+ u'= %s' % ModuleObject.GetPcdIsDriver()
+ SpecialStatementList.append(Statement)
+
+ # TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION
+ if ModuleObject.GetUefiSpecificationVersion():
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION).ljust(LeftOffset) + \
+ u'= %s' % ModuleObject.GetUefiSpecificationVersion()
+ SpecialStatementList.append(Statement)
+
+ # TAB_INF_DEFINES_PI_SPECIFICATION_VERSION
+ if ModuleObject.GetPiSpecificationVersion():
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION).ljust(LeftOffset) + \
+ u'= %s' % ModuleObject.GetPiSpecificationVersion()
+ SpecialStatementList.append(Statement)
+
+ # LibraryClass
+ for LibraryClass in ModuleObject.GetLibraryClassList():
+ if LibraryClass.GetUsage() == DT.USAGE_ITEM_PRODUCES or \
+ LibraryClass.GetUsage() == DT.USAGE_ITEM_SOMETIMES_PRODUCES:
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_LIBRARY_CLASS).ljust(LeftOffset) + \
+ u'= %s' % LibraryClass.GetLibraryClass()
+ if LibraryClass.GetSupModuleList():
+ Statement += '|' + DT.TAB_SPACE_SPLIT.join(l for l in LibraryClass.GetSupModuleList())
+ SpecialStatementList.append(Statement)
+
+ # Spec Item
+ for SpecItem in ModuleObject.GetSpecList():
+ Spec, Version = SpecItem
+ Spec = ConvertSpec(Spec)
+ Statement = '%s %s = %s' % (DT.TAB_INF_DEFINES_SPEC, Spec, Version)
+ SpecialStatementList.append(Statement)
+
+ # Extern
+ ExternList = []
+ for Extern in ModuleObject.GetExternList():
+ ArchList = Extern.GetSupArchList()
+ EntryPoint = Extern.GetEntryPoint()
+ UnloadImage = Extern.GetUnloadImage()
+ Constructor = Extern.GetConstructor()
+ Destructor = Extern.GetDestructor()
+ HelpStringList = Extern.GetHelpTextList()
+ FFE = Extern.GetFeatureFlag()
+ ExternList.append([ArchList, EntryPoint, UnloadImage, Constructor, Destructor, FFE, HelpStringList])
+ #
+ # Add VALID_ARCHITECTURES information
+ #
+ ValidArchStatement = None
+ if ModuleObject.SupArchList:
+ ValidArchStatement = '\n' + '# ' + '\n'
+ ValidArchStatement += '# The following information is for reference only and not required by the build tools.\n'
+ ValidArchStatement += '# ' + '\n'
+ ValidArchStatement += '# VALID_ARCHITECTURES = %s' % (' '.join(ModuleObject.SupArchList)) + '\n'
+ ValidArchStatement += '# '
+ if DT.TAB_ARCH_COMMON not in NewSectionDict:
+ NewSectionDict[DT.TAB_ARCH_COMMON] = []
+ NewSectionDict[DT.TAB_ARCH_COMMON] = NewSectionDict[DT.TAB_ARCH_COMMON] + SpecialStatementList
+ GenMetaFileMisc.AddExternToDefineSec(NewSectionDict, DT.TAB_ARCH_COMMON, ExternList)
+ if ValidArchStatement is not None:
+ NewSectionDict[DT.TAB_ARCH_COMMON] = NewSectionDict[DT.TAB_ARCH_COMMON] + [ValidArchStatement]
+ Content += GenSection('Defines', NewSectionDict)
+ return Content
+
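+# For reference, a hypothetical [Defines] section assembled above (all field
+# values are examples):
+#
+#   [Defines]
+#     INF_VERSION                    = 0x00010017
+#     BASE_NAME                      = ExampleDriver
+#     FILE_GUID                      = 11111111-2222-3333-4444-555555555555
+#     VERSION_STRING                 = 1.0
+#     MODULE_TYPE                    = DXE_DRIVER
+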
+def GenLibraryClasses(ModuleObject):
+ #
+ # generate [LibraryClasses] section
+ #
+ Content = ''
+ NewSectionDict = {}
+ if not GlobalData.gIS_BINARY_INF:
+ for LibraryClass in ModuleObject.GetLibraryClassList():
+ if LibraryClass.GetUsage() == DT.USAGE_ITEM_PRODUCES:
+ continue
+ #
+ # Generate generic comment
+ #
+ HelpTextList = LibraryClass.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr)
+ Statement = CommentStr
+ Name = LibraryClass.GetLibraryClass()
+ FFE = LibraryClass.GetFeatureFlag()
+ Statement += Name
+ if FFE:
+ Statement += '|' + FFE
+ ModuleList = LibraryClass.GetSupModuleList()
+ ArchList = LibraryClass.GetSupArchList()
+ for Index in range(0, len(ArchList)):
+ ArchList[Index] = ConvertArchForInstall(ArchList[Index])
+ ArchList.sort()
+ SortedArch = ' '.join(ArchList)
+ KeyList = []
+ if not ModuleList or IsAllModuleList(ModuleList):
+ KeyList = [SortedArch]
+ else:
+ ModuleString = DT.TAB_VALUE_SPLIT.join(l for l in ModuleList)
+ if not ArchList:
+ SortedArch = DT.TAB_ARCH_COMMON
+ KeyList = [SortedArch + '.' + ModuleString]
+ else:
+ KeyList = [Arch + '.' + ModuleString for Arch in ArchList]
+ for Key in KeyList:
+ if Key in NewSectionDict:
+ NewSectionDict[Key] = NewSectionDict[Key] + [Statement]
+ else:
+ NewSectionDict[Key] = [Statement]
+ Content += GenSection('LibraryClasses', NewSectionDict)
+ else:
+ LibraryClassDict = {}
+ for BinaryFile in ModuleObject.GetBinaryFileList():
+ if not BinaryFile.AsBuiltList:
+ continue
+ for LibraryItem in BinaryFile.AsBuiltList[0].LibraryInstancesList:
+ Statement = '# Guid: ' + LibraryItem.Guid + ' Version: ' + LibraryItem.Version
+
+ if len(BinaryFile.SupArchList) == 0:
+ if 'COMMON' in LibraryClassDict and Statement not in LibraryClassDict['COMMON']:
+ LibraryClassDict['COMMON'].append(Statement)
+ else:
+ LibraryClassDict['COMMON'] = ['## @LIB_INSTANCES']
+ LibraryClassDict['COMMON'].append(Statement)
+ else:
+ for Arch in BinaryFile.SupArchList:
+ if Arch in LibraryClassDict:
+ if Statement not in LibraryClassDict[Arch]:
+ LibraryClassDict[Arch].append(Statement)
+ else:
+ continue
+ else:
+ LibraryClassDict[Arch] = ['## @LIB_INSTANCES']
+ LibraryClassDict[Arch].append(Statement)
+ Content += GenSection('LibraryClasses', LibraryClassDict)
+
+ return Content
+
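+# For reference, a hypothetical source-INF entry generated above, where the
+# optional feature-flag expression follows the library class name:
+#
+#   [LibraryClasses]
+#     DebugLib|gExampleTokenSpaceGuid.PcdExampleFlag
+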
+def GenPackages(ModuleObject):
+ Content = ''
+ #
+ # generate [Packages] section
+ #
+ NewSectionDict = Sdict()
+ WorkspaceDir = GlobalData.gWORKSPACE
+ for PackageDependency in ModuleObject.GetPackageDependencyList():
+ #
+ # Generate generic comment
+ #
+ CommentStr = ''
+ HelpText = PackageDependency.GetHelpText()
+ if HelpText:
+ HelpStr = HelpText.GetString()
+ CommentStr = GenGenericCommentF(HelpStr)
+ Statement = CommentStr
+ Guid = PackageDependency.GetGuid()
+ Version = PackageDependency.GetVersion()
+ FFE = PackageDependency.GetFeatureFlag()
+ Path = ''
+ #
+ # find package path/name
+ #
+ for PkgInfo in GlobalData.gWSPKG_LIST:
+ if Guid == PkgInfo[1]:
+ if (not Version) or (Version == PkgInfo[2]):
+ Path = PkgInfo[3]
+ break
+ #
+ # get relative path
+ #
+ RelaPath = GetRelativePath(Path, WorkspaceDir)
+ Statement += RelaPath.replace('\\', '/')
+ if FFE:
+ Statement += '|' + FFE
+ ArchList = sorted(PackageDependency.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+ Content += GenSection('Packages', NewSectionDict)
+ return Content
+
+def GenSources(ModuleObject):
+ #
+ # generate [Sources] section
+ #
+ Content = ''
+ NewSectionDict = {}
+ for Source in ModuleObject.GetSourceFileList():
+ SourceFile = Source.GetSourceFile()
+ Family = Source.GetFamily()
+ FeatureFlag = Source.GetFeatureFlag()
+ SupArchList = sorted(Source.GetSupArchList())
+ SortedArch = ' '.join(SupArchList)
+ Statement = GenSourceStatement(ConvertPath(SourceFile), Family, FeatureFlag)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+ Content += GenSection('Sources', NewSectionDict)
+
+ return Content
+
+def GenDepex(ModuleObject):
+ #
+ # generate [Depex] section
+ #
+ NewSectionDict = Sdict()
+ Content = ''
+ for Depex in ModuleObject.GetPeiDepex() + ModuleObject.GetDxeDepex() + ModuleObject.GetSmmDepex():
+ HelpTextList = Depex.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr)
+ SupArchList = Depex.GetSupArchList()
+ SupModList = Depex.GetModuleType()
+ Expression = Depex.GetDepex()
+ Statement = CommentStr + Expression
+ SupArchList.sort()
+ KeyList = []
+ if not SupArchList:
+ SupArchList.append(DT.TAB_ARCH_COMMON.lower())
+ if not SupModList:
+ KeyList = SupArchList
+ else:
+ for ModuleType in SupModList:
+ for Arch in SupArchList:
+ KeyList.append(ConvertArchForInstall(Arch) + '.' + ModuleType)
+ for Key in KeyList:
+ if Key in NewSectionDict:
+ NewSectionDict[Key] = NewSectionDict[Key] + [Statement]
+ else:
+ NewSectionDict[Key] = [Statement]
+ Content += GenSection('Depex', NewSectionDict, False)
+
+ return Content
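+
+# For reference, a hypothetical [Depex] section generated above; the section
+# key combines arch and module type:
+#
+#   [Depex.common.DXE_DRIVER]
+#     gEfiExampleProtocolGuid AND gEfiOtherProtocolGuid
+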
+## GenUserExtensions
+#
+# Generate [UserExtensions] sections
+#
+def GenUserExtensions(ModuleObject):
+ NewSectionDict = {}
+ for UserExtension in ModuleObject.GetUserExtensionList():
+ if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID and \
+ UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
+ continue
+ if UserExtension.GetIdentifier() == 'Depex':
+ continue
+ Statement = UserExtension.GetStatement()
+# The check below is commented out to support user extensions that contain no statement, only the section header in []
+# if not Statement:
+# continue
+ ArchList = UserExtension.GetSupArchList()
+ for Index in range(0, len(ArchList)):
+ ArchList[Index] = ConvertArchForInstall(ArchList[Index])
+ ArchList.sort()
+ KeyList = []
+ CommonPreFix = ''
+ if UserExtension.GetUserID():
+ CommonPreFix = UserExtension.GetUserID()
+ if CommonPreFix.find('.') > -1:
+ CommonPreFix = '"' + CommonPreFix + '"'
+ if UserExtension.GetIdentifier():
+ CommonPreFix += '.' + '"' + UserExtension.GetIdentifier() + '"'
+ if ArchList:
+ KeyList = [CommonPreFix + '.' + Arch for Arch in ArchList]
+ else:
+ KeyList = [CommonPreFix]
+ for Key in KeyList:
+ if Key in NewSectionDict:
+ NewSectionDict[Key] = NewSectionDict[Key] + [Statement]
+ else:
+ NewSectionDict[Key] = [Statement]
+ Content = GenSection('UserExtensions', NewSectionDict, False)
+
+ return Content
+
+# GenSourceStatement
+#
+# @param SourceFile: string of source file path/name
+# @param Family: string of source file family field
+# @param FeatureFlag: string of source file FeatureFlag field
+# @param TagName: string of source file TagName field
+# @param ToolCode: string of source file ToolCode field
+# @param HelpStr: string of source file HelpStr field
+#
+# @retval Statement: The generated statement for source
+#
+def GenSourceStatement(SourceFile, Family, FeatureFlag, TagName=None,
+ ToolCode=None, HelpStr=None):
+ Statement = ''
+ if HelpStr:
+ Statement += GenGenericCommentF(HelpStr)
+ #
+ # format of SourceFile|Family|TagName|ToolCode|FeatureFlag
+ #
+ Statement += SourceFile
+ if TagName is None:
+ TagName = ''
+ if ToolCode is None:
+ ToolCode = ''
+ if HelpStr is None:
+ HelpStr = ''
+ if FeatureFlag:
+ Statement += '|' + Family + '|' + TagName + '|' + ToolCode + '|' + FeatureFlag
+ elif ToolCode:
+ Statement += '|' + Family + '|' + TagName + '|' + ToolCode
+ elif TagName:
+ Statement += '|' + Family + '|' + TagName
+ elif Family:
+ Statement += '|' + Family
+ return Statement
+
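+# For reference, with every optional field present the statement reads
+# (values hypothetical):
+#
+#   Example.c|MSFT|EXAMPLE_TAG|EXAMPLE_TOOL|gExampleTokenSpaceGuid.PcdExampleFlag
+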
+# GenBinaryStatement
+#
+# @param Key: (FileName, FileType, FFE, SortedArch)
+# @param Value: (Target, Family, TagName, Comment)
+#
+#
+def GenBinaryStatement(Key, Value, SubTypeGuidValue=None):
+ (FileName, FileType, FFE, SortedArch) = Key
+ if SortedArch:
+ pass
+ if Value:
+ (Target, Family, TagName, Comment) = Value
+ else:
+ Target = ''
+ Family = ''
+ TagName = ''
+ Comment = ''
+ if Comment:
+ Statement = GenGenericCommentF(Comment)
+ else:
+ Statement = ''
+ if FileType == 'SUBTYPE_GUID' and SubTypeGuidValue:
+ Statement += FileType + '|' + SubTypeGuidValue + '|' + FileName
+ else:
+ Statement += FileType + '|' + FileName
+ if FileType in DT.BINARY_FILE_TYPE_UI_LIST + DT.BINARY_FILE_TYPE_VER_LIST:
+ if FFE:
+ Statement += '|' + Target + '|' + FFE
+ elif Target:
+ Statement += '|' + Target
+ else:
+ if FFE:
+ Statement += '|' + Target + '|' + Family + '|' + TagName + '|' + FFE
+ elif TagName:
+ Statement += '|' + Target + '|' + Family + '|' + TagName
+ elif Family:
+ Statement += '|' + Target + '|' + Family
+ elif Target:
+ Statement += '|' + Target
+ return Statement
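+
+# For reference, hypothetical statements for the two branches above (UI/VER
+# types take Target then FeatureFlag; other types take Target|Family|TagName):
+#
+#   UI|ExampleModule|RELEASE|gExampleTokenSpaceGuid.PcdExampleFlag
+#   PE32|Example.efi|RELEASE|MSFT|EXAMPLE_TAG
+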
+## GenGuidSections
+#
+# @param GuidObjList: List of GuidObject
+# @retVal Content: The generated section contents
+#
+def GenGuidSections(GuidObjList):
+ #
+ # generate [Guids] section
+ #
+ Content = ''
+ GuidDict = Sdict()
+ for Guid in GuidObjList:
+ HelpTextList = Guid.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CName = Guid.GetCName()
+ FFE = Guid.GetFeatureFlag()
+ Statement = CName
+ if FFE:
+ Statement += '|' + FFE
+ Usage = Guid.GetUsage()
+ GuidType = Guid.GetGuidTypeList()[0]
+ VariableName = Guid.GetVariableName()
+ #
+        # Differentiate generic comments from usage comments, as multiple generic comments need to be put first
+ #
+ if Usage == DT.ITEM_UNDEFINED and GuidType == DT.ITEM_UNDEFINED:
+ # generate list of generic comment
+ Comment = GenGenericCommentF(HelpStr)
+ else:
+ # generate list of other comment
+ Comment = HelpStr.replace('\n', ' ')
+ Comment = Comment.strip()
+ if Comment:
+ Comment = ' # ' + Comment
+ else:
+ Comment = ''
+ if Usage != DT.ITEM_UNDEFINED and GuidType == DT.ITEM_UNDEFINED:
+ Comment = '## ' + Usage + Comment
+ elif GuidType == 'Variable':
+ Comment = '## ' + Usage + ' ## ' + GuidType + ':' + VariableName + Comment
+ else:
+ Comment = '## ' + Usage + ' ## ' + GuidType + Comment
+
+ if Comment:
+ Comment += '\n'
+ #
+ # merge duplicate items
+ #
+ ArchList = sorted(Guid.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if (Statement, SortedArch) in GuidDict:
+ PreviousComment = GuidDict[Statement, SortedArch]
+ Comment = PreviousComment + Comment
+ GuidDict[Statement, SortedArch] = Comment
+ NewSectionDict = GenMetaFileMisc.TransferDict(GuidDict, 'INF_GUID')
+ #
+ # generate the section contents
+ #
+ if NewSectionDict:
+ Content = GenSection('Guids', NewSectionDict)
+
+ return Content
+
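+# For reference, the usage comments attached above take these forms (values
+# hypothetical):
+#
+#   gExampleGuid    ## CONSUMES ## Variable:ExampleVar # help text
+#   gExampleGuid    ## PRODUCES ## GUID # help text
+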
+## GenProtocolPPiSections
+#
+# @param ObjList: List of ProtocolObject or Ppi Object
+# @retVal Content: The generated section contents
+#
+def GenProtocolPPiSections(ObjList, IsProtocol):
+ Content = ''
+ Dict = Sdict()
+ for Object in ObjList:
+ HelpTextList = Object.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CName = Object.GetCName()
+ FFE = Object.GetFeatureFlag()
+ Statement = CName
+ if FFE:
+ Statement += '|' + FFE
+ Usage = Object.GetUsage()
+ Notify = Object.GetNotify()
+ #
+        # Differentiate generic comments from usage comments, as consecutive generic comments need to be kept together
+ #
+ if Usage == DT.ITEM_UNDEFINED and Notify == '':
+ # generate list of generic comment
+ Comment = GenGenericCommentF(HelpStr)
+ else:
+ # generate list of other comment
+ Comment = HelpStr.replace('\n', ' ')
+ Comment = Comment.strip()
+ if Comment:
+ Comment = ' # ' + Comment
+ else:
+ Comment = ''
+ if Usage == DT.ITEM_UNDEFINED and not Comment and Notify == '':
+ Comment = ''
+ else:
+ if Notify:
+ Comment = '## ' + Usage + ' ## ' + 'NOTIFY' + Comment
+ else:
+ Comment = '## ' + Usage + Comment
+ if Comment:
+ Comment += '\n'
+ #
+ # merge duplicate items
+ #
+ ArchList = sorted(Object.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if (Statement, SortedArch) in Dict:
+ PreviousComment = Dict[Statement, SortedArch]
+ Comment = PreviousComment + Comment
+ Dict[Statement, SortedArch] = Comment
+ NewSectionDict = GenMetaFileMisc.TransferDict(Dict, 'INF_PPI_PROTOCOL')
+ #
+ # generate the section contents
+ #
+ if NewSectionDict:
+ if IsProtocol:
+ Content = GenSection('Protocols', NewSectionDict)
+ else:
+ Content = GenSection('Ppis', NewSectionDict)
+
+ return Content
+
+## GenPcdSections
+#
+#
+def GenPcdSections(ModuleObject):
+ Content = ''
+ if not GlobalData.gIS_BINARY_INF:
+ #
+ # for each Pcd Itemtype, maintain a dict so the same type will be grouped
+ # together
+ #
+ ItemTypeDict = {}
+ for Pcd in ModuleObject.GetPcdList():
+ HelpTextList = Pcd.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ Statement = ''
+ CName = Pcd.GetCName()
+ TokenSpaceGuidCName = Pcd.GetTokenSpaceGuidCName()
+ DefaultValue = Pcd.GetDefaultValue()
+ ItemType = Pcd.GetItemType()
+ if ItemType in ItemTypeDict:
+ Dict = ItemTypeDict[ItemType]
+ else:
+ Dict = Sdict()
+ ItemTypeDict[ItemType] = Dict
+ FFE = Pcd.GetFeatureFlag()
+ Statement += TokenSpaceGuidCName + '.' + CName
+ if DefaultValue:
+ Statement += '|' + DefaultValue
+ if FFE:
+ Statement += '|' + FFE
+ elif FFE:
+ Statement += '||' + FFE
+ #
+ # Generate comment
+ #
+ Usage = Pcd.GetValidUsage()
+ # if FeatureFlag Pcd, then assume all Usage is CONSUMES
+ if ItemType == DT.TAB_INF_FEATURE_PCD:
+ Usage = DT.USAGE_ITEM_CONSUMES
+ if Usage == DT.ITEM_UNDEFINED:
+ # generate list of generic comment
+ Comment = GenGenericCommentF(HelpStr)
+ else:
+ # generate list of other comment
+ Comment = HelpStr.replace('\n', ' ')
+ Comment = Comment.strip()
+ if Comment:
+ Comment = ' # ' + Comment
+ else:
+ Comment = ''
+ Comment = '## ' + Usage + Comment
+ if Comment:
+ Comment += '\n'
+ #
+ # Merge duplicate entries
+ #
+ ArchList = sorted(Pcd.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ if (Statement, SortedArch) in Dict:
+ PreviousComment = Dict[Statement, SortedArch]
+ Comment = PreviousComment + Comment
+ Dict[Statement, SortedArch] = Comment
+ for ItemType in ItemTypeDict:
+ # First we need to transfer the Dict to use SortedArch as key
+ Dict = ItemTypeDict[ItemType]
+ NewSectionDict = GenMetaFileMisc.TransferDict(Dict, 'INF_PCD')
+ if NewSectionDict:
+ Content += GenSection(ItemType, NewSectionDict)
+ #
+ # For AsBuild INF files
+ #
+ else:
+        Content += GenAsBuiltPatchPcdSections(ModuleObject)
+ Content += GenAsBuiltPcdExSections(ModuleObject)
+
+ return Content
+
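+# For reference, a source-INF PCD statement built above follows
+# <TokenSpcCName>.<TokenCName>[|<Value>][|<FeatureFlag>]; a hypothetical entry:
+#
+#   [FixedPcd]
+#     ## CONSUMES
+#     gExampleTokenSpaceGuid.PcdExampleValue|0x0
+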
+## GenAsBuiltPatchPcdSections
+#
+#
+def GenAsBuiltPatchPcdSections(ModuleObject):
+ PatchPcdDict = {}
+ for BinaryFile in ModuleObject.GetBinaryFileList():
+ if not BinaryFile.AsBuiltList:
+ continue
+ for PatchPcd in BinaryFile.AsBuiltList[0].PatchPcdList:
+ TokenSpaceName = ''
+ PcdCName = PatchPcd.CName
+ PcdValue = PatchPcd.DefaultValue
+ PcdOffset = PatchPcd.Offset
+ TokenSpaceGuidValue = PatchPcd.TokenSpaceGuidValue
+ Token = PatchPcd.Token
+ HelpTextList = PatchPcd.HelpTextList
+ HelpString = ''
+ for HelpStringItem in HelpTextList:
+ for HelpLine in GetSplitValueList(HelpStringItem.String, '\n'):
+ HelpString += '## ' + HelpLine + '\n'
+ TokenSpaceName, PcdCName = GenMetaFileMisc.ObtainPcdName(ModuleObject.PackageDependencyList,
+ TokenSpaceGuidValue,
+ Token)
+ if TokenSpaceName == '' or PcdCName == '':
+ Logger.Error("Upt",
+ ToolError.RESOURCE_NOT_AVAILABLE,
+ ST.ERR_INSTALL_FILE_DEC_FILE_ERROR % (TokenSpaceGuidValue, Token),
+ File=ModuleObject.GetFullPath())
+ Statement = HelpString + TokenSpaceName + '.' + PcdCName + ' | ' + PcdValue + ' | ' + \
+ PcdOffset + DT.TAB_SPACE_SPLIT
+ #
+ # Use binary file's Arch to be Pcd's Arch
+ #
+ ArchList = []
+ FileNameObjList = BinaryFile.GetFileNameList()
+ if FileNameObjList:
+ ArchList = FileNameObjList[0].GetSupArchList()
+ if len(ArchList) == 0:
+ if DT.TAB_ARCH_COMMON in PatchPcdDict:
+ if Statement not in PatchPcdDict[DT.TAB_ARCH_COMMON]:
+ PatchPcdDict[DT.TAB_ARCH_COMMON].append(Statement)
+ else:
+ PatchPcdDict[DT.TAB_ARCH_COMMON] = [Statement]
+ else:
+ for Arch in ArchList:
+ if Arch in PatchPcdDict:
+ if Statement not in PatchPcdDict[Arch]:
+ PatchPcdDict[Arch].append(Statement)
+ else:
+ PatchPcdDict[Arch] = [Statement]
+ return GenSection(DT.TAB_INF_PATCH_PCD, PatchPcdDict)
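+
+# For reference, an as-built patch-PCD statement built above reads
+# <TokenSpaceName>.<PcdCName> | <Value> | <Offset>; a hypothetical entry:
+#
+#   ## Example help text
+#   gExampleTokenSpaceGuid.PcdExamplePatchable | 0x1 | 0x0024
+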
+## GenAsBuiltPcdExSections
+#
+#
+def GenAsBuiltPcdExSections(ModuleObject):
+ PcdExDict = {}
+ for BinaryFile in ModuleObject.GetBinaryFileList():
+ if not BinaryFile.AsBuiltList:
+ continue
+ for PcdExItem in BinaryFile.AsBuiltList[0].PcdExValueList:
+ TokenSpaceName = ''
+ PcdCName = PcdExItem.CName
+ TokenSpaceGuidValue = PcdExItem.TokenSpaceGuidValue
+ Token = PcdExItem.Token
+ HelpTextList = PcdExItem.HelpTextList
+ HelpString = ''
+ for HelpStringItem in HelpTextList:
+ for HelpLine in GetSplitValueList(HelpStringItem.String, '\n'):
+ HelpString += '## ' + HelpLine + '\n'
+ TokenSpaceName, PcdCName = GenMetaFileMisc.ObtainPcdName(ModuleObject.PackageDependencyList,
+ TokenSpaceGuidValue, Token)
+ if TokenSpaceName == '' or PcdCName == '':
+ Logger.Error("Upt",
+ ToolError.RESOURCE_NOT_AVAILABLE,
+ ST.ERR_INSTALL_FILE_DEC_FILE_ERROR % (TokenSpaceGuidValue, Token),
+ File=ModuleObject.GetFullPath())
+
+ Statement = HelpString + TokenSpaceName + DT.TAB_SPLIT + PcdCName + DT.TAB_SPACE_SPLIT
+
+ #
+ # Use the binary file's Arch as the Pcd's Arch
+ #
+ ArchList = []
+ FileNameObjList = BinaryFile.GetFileNameList()
+ if FileNameObjList:
+ ArchList = FileNameObjList[0].GetSupArchList()
+
+ if len(ArchList) == 0:
+ if 'COMMON' in PcdExDict:
+ PcdExDict['COMMON'].append(Statement)
+ else:
+ PcdExDict['COMMON'] = [Statement]
+ else:
+ for Arch in ArchList:
+ if Arch in PcdExDict:
+ if Statement not in PcdExDict[Arch]:
+ PcdExDict[Arch].append(Statement)
+ else:
+ PcdExDict[Arch] = [Statement]
+ return GenSection('PcdEx', PcdExDict)
+
+## GenSpecialSections
+# generate special sections for Event/BootMode/Hob
+#
+def GenSpecialSections(ObjectList, SectionName, UserExtensionsContent=''):
+ #
+ # generate section
+ #
+ Content = ''
+ NewSectionDict = {}
+ for Obj in ObjectList:
+ #
+ # Generate comment
+ #
+ CommentStr = ''
+ HelpTextList = Obj.GetHelpTextList()
+ HelpStr = _GetHelpStr(HelpTextList)
+ CommentStr = GenGenericCommentF(HelpStr)
+ if SectionName == 'Hob':
+ Type = Obj.GetHobType()
+ elif SectionName == 'Event':
+ Type = Obj.GetEventType()
+ elif SectionName == 'BootMode':
+ Type = Obj.GetSupportedBootModes()
+ else:
+ assert(SectionName)
+ Usage = Obj.GetUsage()
+
+ # If the content is already in UserExtensionsContent, ignore it
+ if '[%s]' % SectionName in UserExtensionsContent and Type in UserExtensionsContent:
+ return ''
+
+ Statement = ' ' + Type + ' ## ' + Usage
+ if CommentStr in ['#\n', '#\n#\n']:
+ CommentStr = '#\n#\n#\n'
+ #
+ # the first head comment line should start with '##'; if it starts with
+ # '#\n', prepend one '#', otherwise prepend '##\n' to meet the format
+ # defined in the INF spec
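+ #
+ # For example (hypothetical input): '#\n# help text\n' becomes
+ # '##\n# help text\n#\n', i.e. a '##' header line, the help text, and a
+ # closing '#' line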
+ #
+ if CommentStr.startswith('#\n'):
+ CommentStr = '#' + CommentStr
+ elif CommentStr:
+ CommentStr = '##\n' + CommentStr
+ if CommentStr and not CommentStr.endswith('\n#\n'):
+ CommentStr = CommentStr + '#\n'
+ NewStateMent = CommentStr + Statement
+ SupArch = sorted(Obj.GetSupArchList())
+ SortedArch = ' '.join(SupArch)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [NewStateMent]
+ else:
+ NewSectionDict[SortedArch] = [NewStateMent]
+ SectionContent = GenSection(SectionName, NewSectionDict)
+ SectionContent = SectionContent.strip()
+ if SectionContent:
+ Content = '# ' + ('\n' + '# ').join(GetSplitValueList(SectionContent, '\n'))
+ Content = Content.lstrip()
+ #
+ # add a newline to separate this section from other possible sections
+ #
+ if Content:
+ Content += '\n'
+ return Content
+## GenBuildOptions
+#
+# Generate the [BuildOptions] section
+#
+def GenBuildOptions(ModuleObject):
+ Content = ''
+ if not ModuleObject.BinaryModule:
+ #
+ # generate [BuildOptions] section
+ #
+ NewSectionDict = {}
+ for UserExtension in ModuleObject.GetUserExtensionList():
+ BuildOptionDict = UserExtension.GetBuildOptionDict()
+ if not BuildOptionDict:
+ continue
+ for Arch in BuildOptionDict:
+ if Arch in NewSectionDict:
+ NewSectionDict[Arch] = NewSectionDict[Arch] + [BuildOptionDict[Arch]]
+ else:
+ NewSectionDict[Arch] = [BuildOptionDict[Arch]]
+ Content = GenSection('BuildOptions', NewSectionDict)
+ else:
+ BuildOptionDict = {}
+ for BinaryFile in ModuleObject.GetBinaryFileList():
+ if not BinaryFile.AsBuiltList:
+ continue
+ for BuildOptionItem in BinaryFile.AsBuiltList[0].BinaryBuildFlagList:
+ Statement = '#' + BuildOptionItem.AsBuiltOptionFlags
+ if len(BinaryFile.SupArchList) == 0:
+ if 'COMMON' in BuildOptionDict:
+ if Statement not in BuildOptionDict['COMMON']:
+ BuildOptionDict['COMMON'].append(Statement)
+ else:
+ BuildOptionDict['COMMON'] = ['## @AsBuilt']
+ BuildOptionDict['COMMON'].append(Statement)
+ else:
+ for Arch in BinaryFile.SupArchList:
+ if Arch in BuildOptionDict:
+ if Statement not in BuildOptionDict[Arch]:
+ BuildOptionDict[Arch].append(Statement)
+ else:
+ BuildOptionDict[Arch] = ['## @AsBuilt']
+ BuildOptionDict[Arch].append(Statement)
+ Content = GenSection('BuildOptions', BuildOptionDict)
+
+ return Content
+## GenBinaries
+#
+# Generate the [Binaries] section
+#
+def GenBinaries(ModuleObject):
+ NewSectionDict = {}
+ BinariesDict = []
+ for UserExtension in ModuleObject.GetUserExtensionList():
+ BinariesDict = UserExtension.GetBinariesDict()
+ if BinariesDict:
+ break
+ for BinaryFile in ModuleObject.GetBinaryFileList():
+ FileNameObjList = BinaryFile.GetFileNameList()
+ for FileNameObj in FileNameObjList:
+ FileName = ConvertPath(FileNameObj.GetFilename())
+ FileType = FileNameObj.GetFileType()
+ FFE = FileNameObj.GetFeatureFlag()
+ ArchList = sorted(FileNameObj.GetSupArchList())
+ SortedArch = ' '.join(ArchList)
+ Key = (FileName, FileType, FFE, SortedArch)
+ if Key in BinariesDict:
+ ValueList = BinariesDict[Key]
+ for ValueItem in ValueList:
+ Statement = GenBinaryStatement(Key, ValueItem)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+ #
+ # as we have already generated the statement for this DictKey, set the
+ # value list to empty to avoid generating duplicate entries, since the
+ # DictKey may have multiple entries
+ #
+ BinariesDict[Key] = []
+ else:
+ if FileType == 'SUBTYPE_GUID' and FileNameObj.GetGuidValue():
+ Statement = GenBinaryStatement(Key, None, FileNameObj.GetGuidValue())
+ else:
+ Statement = GenBinaryStatement(Key, None)
+ if SortedArch in NewSectionDict:
+ NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
+ else:
+ NewSectionDict[SortedArch] = [Statement]
+ Content = GenSection('Binaries', NewSectionDict)
+
+ return Content
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py
new file mode 100755
index 00000000..e506def3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py
@@ -0,0 +1,188 @@
+## @file GenMetaFileMisc.py
+#
+# This file contains the miscellaneous routines for GenMetaFile usage.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+GenMetaFileMisc
+'''
+
+from Library import DataType as DT
+from Library import GlobalData
+from Parser.DecParser import Dec
+
+## AddExternToDefineSec
+#
+# @param SectionDict: dict of statements for the [Defines] section, keyed by Arch
+# @param Arch: architecture string the extern entries apply to
+# @param ExternList: list of extern entries in the form (ArchList, EntryPoint,
+#                    UnloadImage, Constructor, Destructor, FFE, HelpStringList)
+#
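+# A typical generated statement (hypothetical values) looks like
+#   ENTRY_POINT                    = DriverEntry | gFeatureFlagExpr
+# with the keyword left-justified to LeftOffset (31) columns and any help
+# text lines placed before or after it
+#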
+def AddExternToDefineSec(SectionDict, Arch, ExternList):
+ LeftOffset = 31
+ for ArchList, EntryPoint, UnloadImage, Constructor, Destructor, FFE, HelpStringList in ExternList:
+ if Arch or ArchList:
+ if EntryPoint:
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_ENTRY_POINT).ljust(LeftOffset) + u'= %s' % EntryPoint
+ if FFE:
+ Statement += ' | %s' % FFE
+ if len(HelpStringList) > 0:
+ Statement = HelpStringList[0].GetString() + '\n' + Statement
+ if len(HelpStringList) > 1:
+ Statement = Statement + HelpStringList[1].GetString()
+ SectionDict[Arch] = SectionDict[Arch] + [Statement]
+
+ if UnloadImage:
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_UNLOAD_IMAGE).ljust(LeftOffset) + u'= %s' % UnloadImage
+ if FFE:
+ Statement += ' | %s' % FFE
+
+ if len(HelpStringList) > 0:
+ Statement = HelpStringList[0].GetString() + '\n' + Statement
+ if len(HelpStringList) > 1:
+ Statement = Statement + HelpStringList[1].GetString()
+ SectionDict[Arch] = SectionDict[Arch] + [Statement]
+
+ if Constructor:
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_CONSTRUCTOR).ljust(LeftOffset) + u'= %s' % Constructor
+ if FFE:
+ Statement += ' | %s' % FFE
+
+ if len(HelpStringList) > 0:
+ Statement = HelpStringList[0].GetString() + '\n' + Statement
+ if len(HelpStringList) > 1:
+ Statement = Statement + HelpStringList[1].GetString()
+ SectionDict[Arch] = SectionDict[Arch] + [Statement]
+
+ if Destructor:
+ Statement = (u'%s ' % DT.TAB_INF_DEFINES_DESTRUCTOR).ljust(LeftOffset) + u'= %s' % Destructor
+ if FFE:
+ Statement += ' | %s' % FFE
+
+ if len(HelpStringList) > 0:
+ Statement = HelpStringList[0].GetString() + '\n' + Statement
+ if len(HelpStringList) > 1:
+ Statement = Statement + HelpStringList[1].GetString()
+ SectionDict[Arch] = SectionDict[Arch] + [Statement]
+
+## ObtainPcdName
+#
+# Use TokenSpaceGuidValue and Token to obtain the PCD name from the DEC file
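+#
+# For example (hypothetical values): the routine first looks up the matching
+# GuidCName (e.g. gEfiFooTokenSpaceGuid) for TokenSpaceGuidValue in the DEC
+# [Guids] section, then scans the DEC PCD sections for an entry with that
+# TokenSpaceGuidCName and token value to recover the PcdCName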
+#
+def ObtainPcdName(Packages, TokenSpaceGuidValue, Token):
+ TokenSpaceGuidName = ''
+ PcdCName = ''
+ TokenSpaceGuidNameFound = False
+
+ for PackageDependency in Packages:
+ #
+ # Get the GUID and Version of the dependency package
+ #
+ Guid = PackageDependency.GetGuid()
+ Version = PackageDependency.GetVersion()
+
+ Path = None
+ #
+ # find package path/name
+ #
+ for PkgInfo in GlobalData.gWSPKG_LIST:
+ if Guid == PkgInfo[1]:
+ if (not Version) or (Version == PkgInfo[2]):
+ Path = PkgInfo[3]
+ break
+
+ # The dependency package in workspace
+ if Path:
+ DecFile = None
+ if Path not in GlobalData.gPackageDict:
+ DecFile = Dec(Path)
+ GlobalData.gPackageDict[Path] = DecFile
+ else:
+ DecFile = GlobalData.gPackageDict[Path]
+
+ DecGuidsDict = DecFile.GetGuidSectionObject().ValueDict
+ DecPcdsDict = DecFile.GetPcdSectionObject().ValueDict
+
+ TokenSpaceGuidName = ''
+ PcdCName = ''
+ TokenSpaceGuidNameFound = False
+
+ #
+ # Get TokenSpaceGuidCName from Guids section
+ #
+ for GuidKey in DecGuidsDict:
+ GuidList = DecGuidsDict[GuidKey]
+ for GuidItem in GuidList:
+ if TokenSpaceGuidValue.upper() == GuidItem.GuidString.upper():
+ TokenSpaceGuidName = GuidItem.GuidCName
+ TokenSpaceGuidNameFound = True
+ break
+ if TokenSpaceGuidNameFound:
+ break
+ #
+ # Retrieve PcdCName from Pcds Section
+ #
+ for PcdKey in DecPcdsDict:
+ PcdList = DecPcdsDict[PcdKey]
+ for PcdItem in PcdList:
+ if TokenSpaceGuidName == PcdItem.TokenSpaceGuidCName and Token == PcdItem.TokenValue:
+ PcdCName = PcdItem.TokenCName
+ return TokenSpaceGuidName, PcdCName
+
+ # The dependency package in ToBeInstalledDist
+ else:
+ for Dist in GlobalData.gTO_BE_INSTALLED_DIST_LIST:
+ for Package in Dist.PackageSurfaceArea.values():
+ if Guid == Package.Guid:
+ for GuidItem in Package.GuidList:
+ if TokenSpaceGuidValue.upper() == GuidItem.Guid.upper():
+ TokenSpaceGuidName = GuidItem.CName
+ TokenSpaceGuidNameFound = True
+ break
+ for PcdItem in Package.PcdList:
+ if TokenSpaceGuidName == PcdItem.TokenSpaceGuidCName and Token == PcdItem.Token:
+ PcdCName = PcdItem.CName
+ return TokenSpaceGuidName, PcdCName
+
+ return TokenSpaceGuidName, PcdCName
+
+## TransferDict
+# Transfer a dict keyed by (Statement, SortedArch) with comment strings as
+# values into a dict keyed by SortedArch with the combined statement lines
+# as values
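+#
+# A minimal sketch of the transformation (hypothetical values):
+#   {('gGuid.PcdFoo|TRUE', 'IA32 X64'): '## CONSUMES\n'}
+# becomes, for Type 'INF_PCD' (LeftOffset of at least 75),
+#   {'IA32 X64': ['gGuid.PcdFoo|TRUE'.ljust(75) + ' ## CONSUMES']}
+# while a multi-line comment is instead placed above the statement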
+#
+def TransferDict(OrigDict, Type=None):
+ NewDict = {}
+ LeftOffset = 0
+ if Type in ['INF_GUID', 'INF_PPI_PROTOCOL']:
+ LeftOffset = 45
+ if Type in ['INF_PCD']:
+ LeftOffset = 75
+ if LeftOffset > 0:
+ for Statement, SortedArch in OrigDict:
+ if len(Statement) > LeftOffset:
+ LeftOffset = len(Statement)
+
+ for Statement, SortedArch in OrigDict:
+ Comment = OrigDict[Statement, SortedArch]
+ #
+ # apply the N-comment/1-comment rule: a multi-line comment is placed
+ # above the statement, a single-line comment is appended after it
+ #
+ if Comment.find('\n') != len(Comment) - 1:
+ NewStateMent = Comment + Statement
+ else:
+ if LeftOffset:
+ NewStateMent = Statement.ljust(LeftOffset) + ' ' + Comment.rstrip('\n')
+ else:
+ NewStateMent = Statement + ' ' + Comment.rstrip('\n')
+
+ if SortedArch in NewDict:
+ NewDict[SortedArch] = NewDict[SortedArch] + [NewStateMent]
+ else:
+ NewDict[SortedArch] = [NewStateMent]
+
+ return NewDict
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py
new file mode 100644
index 00000000..e2f7069f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py
@@ -0,0 +1,12 @@
+## @file GenXmlFile.py
+#
+# This file contains the logic for generating XML files.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+GenXmlFile
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py
new file mode 100644
index 00000000..4174964f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/GenMetaFile/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'GenMetaFile' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+GenMetaFile
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InstallPkg.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InstallPkg.py
new file mode 100755
index 00000000..3b57b705
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InstallPkg.py
@@ -0,0 +1,967 @@
+## @file
+# Install distribution package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+"""
+Install a distribution package
+"""
+##
+# Import Modules
+#
+from Core.FileHook import __FileHookOpen__
+import os.path
+from os import chmod
+from os import SEEK_SET
+from os import SEEK_END
+import stat
+from hashlib import md5
+import copy
+from sys import stdin
+from sys import platform
+from shutil import rmtree
+from shutil import copyfile
+from traceback import format_exc
+from platform import python_version
+
+from Logger import StringTable as ST
+from Logger.ToolError import UNKNOWN_ERROR
+from Logger.ToolError import FILE_UNKNOWN_ERROR
+from Logger.ToolError import OPTION_MISSING
+from Logger.ToolError import UPT_ALREADY_INSTALLED_ERROR
+from Logger.ToolError import FatalError
+from Logger.ToolError import ABORT_ERROR
+from Logger.ToolError import CODE_ERROR
+from Logger.ToolError import FORMAT_INVALID
+from Logger.ToolError import FILE_TYPE_MISMATCH
+import Logger.Log as Logger
+
+from Library.Misc import Sdict
+from Library.Misc import ConvertPath
+from Library.ParserValidate import IsValidInstallPath
+from Xml.XmlParser import DistributionPackageXml
+from GenMetaFile.GenDecFile import PackageToDec
+from GenMetaFile.GenInfFile import ModuleToInf
+from Core.PackageFile import PackageFile
+from Core.PackageFile import FILE_NOT_FOUND
+from Core.PackageFile import FILE_CHECKSUM_FAILURE
+from Core.PackageFile import CreateDirectory
+from Core.DependencyRules import DependencyRules
+from Library import GlobalData
+
+## InstallNewPackage
+#
+# @param WorkspaceDir: Workspace Directory
+# @param Path: Package Path
+# @param CustomPath: whether the path needs to be customized first
+#
+def InstallNewPackage(WorkspaceDir, Path, CustomPath = False):
+ if os.path.isabs(Path):
+ Logger.Info(ST.MSG_RELATIVE_PATH_ONLY%Path)
+ elif CustomPath:
+ Logger.Info(ST.MSG_NEW_PKG_PATH)
+ else:
+ Path = ConvertPath(Path)
+ Path = os.path.normpath(Path)
+ FullPath = os.path.normpath(os.path.join(WorkspaceDir, Path))
+ if os.path.exists(FullPath):
+ Logger.Info(ST.ERR_DIR_ALREADY_EXIST%FullPath)
+ else:
+ return Path
+
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input == '':
+ Logger.Error("InstallPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
+ Input = Input.replace('\r', '').replace('\n', '')
+ return InstallNewPackage(WorkspaceDir, Input, False)
+
+## InstallNewModule
+#
+# @param WorkspaceDir: Workspace Directory
+# @param Path: Standalone Module Path
+# @param PathList: The already installed standalone module Path list
+#
+def InstallNewModule(WorkspaceDir, Path, PathList = None):
+ if PathList is None:
+ PathList = []
+ Path = ConvertPath(Path)
+ Path = os.path.normpath(Path)
+ FullPath = os.path.normpath(os.path.join(WorkspaceDir, Path))
+ if os.path.exists(FullPath) and FullPath not in PathList:
+ Logger.Info(ST.ERR_DIR_ALREADY_EXIST%Path)
+ elif Path == FullPath:
+ Logger.Info(ST.MSG_RELATIVE_PATH_ONLY%FullPath)
+ else:
+ return Path
+
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input == '':
+ Logger.Error("InstallPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
+ Input = Input.replace('\r', '').replace('\n', '')
+ return InstallNewModule(WorkspaceDir, Input, PathList)
+
+
+## InstallNewFile
+#
+# @param WorkspaceDir: Workspace Directory
+# @param File: File
+#
+def InstallNewFile(WorkspaceDir, File):
+ FullPath = os.path.normpath(os.path.join(WorkspaceDir, File))
+ if os.path.exists(FullPath):
+ Logger.Info(ST.ERR_FILE_ALREADY_EXIST %File)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input == '':
+ Logger.Error("InstallPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
+ Input = Input.replace('\r', '').replace('\n', '')
+ return InstallNewFile(WorkspaceDir, Input)
+ else:
+ return File
+
+## UnZipDp
+#
+# Unpack the distribution package file, parse its description XML, and open the content zip file
+#
+def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
+ ContentZipFile = None
+ Logger.Quiet(ST.MSG_UZIP_PARSE_XML)
+ DistFile = PackageFile(DpPkgFileName)
+
+ DpDescFileName, ContentFileName = GetDPFile(DistFile.GetZipFile())
+
+ TempDir = os.path.normpath(os.path.join(WorkspaceDir, "Conf/.tmp%s" % str(Index)))
+ GlobalData.gUNPACK_DIR.append(TempDir)
+ DistPkgFile = DistFile.UnpackFile(DpDescFileName, os.path.normpath(os.path.join(TempDir, DpDescFileName)))
+ if not DistPkgFile:
+ Logger.Error("InstallPkg", FILE_NOT_FOUND, ST.ERR_FILE_BROKEN %DpDescFileName)
+
+ #
+ # Generate distpkg
+ #
+ DistPkgObj = DistributionPackageXml()
+ DistPkg = DistPkgObj.FromXml(DistPkgFile)
+ if DistPkg.Header.RePackage == '':
+ DistPkg.Header.RePackage = False
+ if DistPkg.Header.ReadOnly == '':
+ DistPkg.Header.ReadOnly = False
+
+ #
+ # unzip contents.zip file
+ #
+ ContentFile = DistFile.UnpackFile(ContentFileName, os.path.normpath(os.path.join(TempDir, ContentFileName)))
+ if not ContentFile:
+ Logger.Error("InstallPkg", FILE_NOT_FOUND,
+ ST.ERR_FILE_BROKEN % ContentFileName)
+
+ #
+ # Get file size
+ #
+ FileSize = os.path.getsize(ContentFile)
+
+ if FileSize != 0:
+ ContentZipFile = PackageFile(ContentFile)
+
+ #
+ # verify the MD5 signature if one exists
+ #
+ if DistPkg.Header.Signature != '':
+ Md5Signature = md5(__FileHookOpen__(ContentFile, 'rb').read())
+ if DistPkg.Header.Signature != Md5Signature.hexdigest():
+ ContentZipFile.Close()
+ Logger.Error("InstallPkg", FILE_CHECKSUM_FAILURE,
+ ExtraData=ContentFile)
+
+ return DistPkg, ContentZipFile, DpPkgFileName, DistFile
+
+## GetPackageList
+#
+# Install all packages in the distribution and generate a DEC file for each
+#
+def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleList, PackageList):
+ NewDict = Sdict()
+ for Guid, Version, Path in DistPkg.PackageSurfaceArea:
+ PackagePath = Path
+ Package = DistPkg.PackageSurfaceArea[Guid, Version, Path]
+ Logger.Info(ST.MSG_INSTALL_PACKAGE % Package.GetName())
+# if Dep.CheckPackageExists(Guid, Version):
+# Logger.Info(ST.WRN_PACKAGE_EXISTED %(Guid, Version))
+ if Options.UseGuidedPkgPath:
+ GuidedPkgPath = "%s_%s_%s" % (Package.GetName(), Guid, Version)
+ NewPackagePath = InstallNewPackage(WorkspaceDir, GuidedPkgPath, Options.CustomPath)
+ else:
+ NewPackagePath = InstallNewPackage(WorkspaceDir, PackagePath, Options.CustomPath)
+ InstallPackageContent(PackagePath, NewPackagePath, Package, ContentZipFile, Dep, WorkspaceDir, ModuleList,
+ DistPkg.Header.ReadOnly)
+ PackageList.append(Package)
+
+ NewDict[Guid, Version, Package.GetPackagePath()] = Package
+
+ #
+ # Now generate the meta-data files. Generate all DEC files for packages
+ # first: DEC must be generated before INF, and INF only after all packages
+ # are installed, otherwise it is hard to resolve a module's package
+ # dependency (the location of a newly installed package would be unknown)
+ #
+ for Package in PackageList:
+ FilePath = PackageToDec(Package, DistPkg.Header)
+ Md5Signature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
+ if (FilePath, Md5Sum) not in Package.FileList:
+ Package.FileList.append((FilePath, Md5Sum))
+
+ return NewDict
+
+## GetModuleList
+#
+# Install all modules in the distribution and generate an INF file for each
+#
+def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
+ #
+ # ModulePathList keeps track of the standalone module paths we have just
+ # installed. If a new module's path is already in that list (which only
+ # happens when multiple INFs share one directory), install it there
+ # directly; otherwise, try to create a new directory for it.
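+ # For example (hypothetical paths): if two INFs declare the same module
+ # path 'MyPkg/MyModule' and that directory is newly created for the first
+ # INF, the second INF finds the full path already in ModulePathList and is
+ # installed into the same directory.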
+ #
+ ModulePathList = []
+
+ #
+ # Check module exist and install
+ #
+ Module = None
+ NewDict = Sdict()
+ for Guid, Version, Name, Path in DistPkg.ModuleSurfaceArea:
+ ModulePath = Path
+ Module = DistPkg.ModuleSurfaceArea[Guid, Version, Name, Path]
+ Logger.Info(ST.MSG_INSTALL_MODULE % Module.GetName())
+ if Dep.CheckModuleExists(Guid, Version, Name, Path):
+ Logger.Quiet(ST.WRN_MODULE_EXISTED %Path)
+ #
+ # Handle the case where multiple INFs share the same module path:
+ # they should be installed into the same directory
+ #
+ ModuleFullPath = \
+ os.path.normpath(os.path.join(WorkspaceDir, ModulePath))
+ if ModuleFullPath not in ModulePathList:
+ NewModulePath = InstallNewModule(WorkspaceDir, ModulePath, ModulePathList)
+ NewModuleFullPath = os.path.normpath(os.path.join(WorkspaceDir, NewModulePath))
+ ModulePathList.append(NewModuleFullPath)
+ else:
+ NewModulePath = ModulePath
+
+ InstallModuleContent(ModulePath, NewModulePath, '', Module, ContentZipFile, WorkspaceDir, ModuleList, None,
+ DistPkg.Header.ReadOnly)
+ #
+ # Update module
+ #
+ Module.SetModulePath(Module.GetModulePath().replace(Path, NewModulePath, 1))
+
+ NewDict[Guid, Version, Name, Module.GetModulePath()] = Module
+
+ #
+ # generate all inf for modules
+ #
+ for (Module, Package) in ModuleList:
+ CheckCNameInModuleRedefined(Module, DistPkg)
+ FilePath = ModuleToInf(Module, Package, DistPkg.Header)
+ Md5Signature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
+ if Package:
+ if (FilePath, Md5Sum) not in Package.FileList:
+ Package.FileList.append((FilePath, Md5Sum))
+ else:
+ if (FilePath, Md5Sum) not in Module.FileList:
+ Module.FileList.append((FilePath, Md5Sum))
+ #
+ # append the module unicode files to Package FileList
+ #
+ for (FilePath, Md5Sum) in Module.FileList:
+ if str(FilePath).endswith('.uni') and Package and (FilePath, Md5Sum) not in Package.FileList:
+ Package.FileList.append((FilePath, Md5Sum))
+
+ return NewDict
+
+##
+# Get all Protocol/Ppi/Guid CNames and Pcd names from all dependent DEC files
+#
+def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
+ #
+ # [[Dec1Protocol1, Dec1Protocol2...], [Dec2Protocols...],...]
+ #
+ DependentProtocolCNames = []
+ DependentPpiCNames = []
+ DependentGuidCNames = []
+ DependentPcdNames = []
+
+ for PackageObj in DePackageObjList:
+ #
+ # Get protocol CName list from all dependent DEC file
+ #
+ ProtocolCNames = []
+ for Protocol in PackageObj.GetProtocolList():
+ if Protocol.GetCName() not in ProtocolCNames:
+ ProtocolCNames.append(Protocol.GetCName())
+
+ DependentProtocolCNames.append(ProtocolCNames)
+
+ #
+ # Get Ppi CName list from all dependent DEC file
+ #
+ PpiCNames = []
+ for Ppi in PackageObj.GetPpiList():
+ if Ppi.GetCName() not in PpiCNames:
+ PpiCNames.append(Ppi.GetCName())
+
+ DependentPpiCNames.append(PpiCNames)
+
+ #
+ # Get Guid CName list from all dependent DEC file
+ #
+ GuidCNames = []
+ for Guid in PackageObj.GetGuidList():
+ if Guid.GetCName() not in GuidCNames:
+ GuidCNames.append(Guid.GetCName())
+
+ DependentGuidCNames.append(GuidCNames)
+
+ #
+ # Get PcdName list from all dependent DEC file
+ #
+ PcdNames = []
+ for Pcd in PackageObj.GetPcdList():
+ PcdName = '.'.join([Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()])
+ if PcdName not in PcdNames:
+ PcdNames.append(PcdName)
+
+ DependentPcdNames.append(PcdNames)
+
+
+ return DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames
+
+##
+# Check if protocol CName is redefined
+#
+def CheckProtocolCNameRedefined(Module, DependentProtocolCNames):
+ for ProtocolInModule in Module.GetProtocolList():
+ IsCNameDefined = False
+ for PackageProtocolCNames in DependentProtocolCNames:
+ if ProtocolInModule.GetCName() in PackageProtocolCNames:
+ if IsCNameDefined:
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
+ ExtraData = \
+ ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % ProtocolInModule.GetCName())
+ else:
+ IsCNameDefined = True
+
+##
+# Check if Ppi CName is redefined
+#
+def CheckPpiCNameRedefined(Module, DependentPpiCNames):
+ for PpiInModule in Module.GetPpiList():
+ IsCNameDefined = False
+ for PackagePpiCNames in DependentPpiCNames:
+ if PpiInModule.GetCName() in PackagePpiCNames:
+ if IsCNameDefined:
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
+ ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PpiInModule.GetCName())
+ else:
+ IsCNameDefined = True
+
+##
+# Check if Guid CName is redefined
+#
+def CheckGuidCNameRedefined(Module, DependentGuidCNames):
+ for GuidInModule in Module.GetGuidList():
+ IsCNameDefined = False
+ for PackageGuidCNames in DependentGuidCNames:
+ if GuidInModule.GetCName() in PackageGuidCNames:
+ if IsCNameDefined:
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
+ ExtraData = \
+ ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % GuidInModule.GetCName())
+ else:
+ IsCNameDefined = True
+
+##
+# Check if PcdName is redefined
+#
+def CheckPcdNameRedefined(Module, DependentPcdNames):
+ PcdObjs = []
+ if not Module.GetBinaryFileList():
+ PcdObjs += Module.GetPcdList()
+ else:
+ Binary = Module.GetBinaryFileList()[0]
+ for AsBuild in Binary.GetAsBuiltList():
+ PcdObjs += AsBuild.GetPatchPcdList() + AsBuild.GetPcdExList()
+
+ for PcdObj in PcdObjs:
+ PcdName = '.'.join([PcdObj.GetTokenSpaceGuidCName(), PcdObj.GetCName()])
+ IsPcdNameDefined = False
+ for PcdNames in DependentPcdNames:
+ if PcdName in PcdNames:
+ if IsPcdNameDefined:
+ Logger.Error("\nUPT", FORMAT_INVALID,
+ File = Module.GetFullPath(),
+ ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PcdName)
+ else:
+ IsPcdNameDefined = True
+
+##
+# Check if any Protocol/Ppi/Guid and Pcd name is redefined in its dependent DEC files
+#
+def CheckCNameInModuleRedefined(Module, DistPkg):
+ DePackageObjList = []
+ #
+ # Get all dependent package objects
+ #
+ for Obj in Module.GetPackageDependencyList():
+ Guid = Obj.GetGuid()
+ Version = Obj.GetVersion()
+ for Key in DistPkg.PackageSurfaceArea:
+ if Key[0] == Guid and Key[1] == Version:
+ if DistPkg.PackageSurfaceArea[Key] not in DePackageObjList:
+ DePackageObjList.append(DistPkg.PackageSurfaceArea[Key])
+
+ DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames = \
+ GetDepProtocolPpiGuidPcdNames(DePackageObjList)
+
+ CheckProtocolCNameRedefined(Module, DependentProtocolCNames)
+ CheckPpiCNameRedefined(Module, DependentPpiCNames)
+ CheckGuidCNameRedefined(Module, DependentGuidCNames)
+ CheckPcdNameRedefined(Module, DependentPcdNames)
+
+## GenToolMisc
+#
+# Install the Tools files and Miscellaneous files in the distribution
+#
+def GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile):
+ ToolObject = DistPkg.Tools
+ MiscObject = DistPkg.MiscellaneousFiles
+ DistPkg.FileList = []
+ FileList = []
+ ToolFileNum = 0
+ FileNum = 0
+ RootDir = WorkspaceDir
+
+ #
+ # FileList stores both tools files and misc files
+ # Misc file list must be appended to FileList *AFTER* Tools file list
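+ # (the ordering matters because FileNum is compared against ToolFileNum
+ # below: entries past the tool-file count are misc files and are installed
+ # relative to WORKSPACE instead of EDK_TOOLS_PATH)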
+ #
+ if ToolObject:
+ FileList += ToolObject.GetFileList()
+ ToolFileNum = len(ToolObject.GetFileList())
+ if 'EDK_TOOLS_PATH' in os.environ:
+ RootDir = os.environ['EDK_TOOLS_PATH']
+ if MiscObject:
+ FileList += MiscObject.GetFileList()
+ for FileObject in FileList:
+ FileNum += 1
+ if FileNum > ToolFileNum:
+ #
+ # Misc files, root should be changed to WORKSPACE
+ #
+ RootDir = WorkspaceDir
+ File = ConvertPath(FileObject.GetURI())
+ ToFile = os.path.normpath(os.path.join(RootDir, File))
+ if os.path.exists(ToFile):
+ Logger.Info( ST.WRN_FILE_EXISTED % ToFile )
+ #
+ # ask the user to input a new file name
+ #
+ Logger.Info( ST.MSG_NEW_FILE_NAME)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ OrigPath = os.path.split(ToFile)[0]
+ ToFile = os.path.normpath(os.path.join(OrigPath, Input))
+ FromFile = os.path.join(FileObject.GetURI())
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, DistPkg.Header.ReadOnly, FileObject.GetExecutable())
+ DistPkg.FileList.append((ToFile, Md5Sum))
+
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods according to the command
+# line options. If no error is found, a zero value is returned so the caller
+# of this tool can know whether it executed successfully or not.
+#
+# @param Options: command Options
+#
+def Main(Options = None):
+ # DistInfoList is referenced in the finally clause below, so initialize it
+ # before entering the try block
+ DistInfoList = []
+ try:
+ DataBase = GlobalData.gDB
+ WorkspaceDir = GlobalData.gWORKSPACE
+ if not Options.PackageFile:
+ Logger.Error("InstallPkg", OPTION_MISSING, ExtraData=ST.ERR_SPECIFY_PACKAGE)
+
+ # Get all Dist Info
+ DistInfoList = []
+ DistPkgList = []
+ Index = 1
+ for ToBeInstalledDist in Options.PackageFile:
+ #
+ # unzip dist.pkg file
+ #
+ DistInfoList.append(UnZipDp(WorkspaceDir, ToBeInstalledDist, Index))
+ DistPkgList.append(DistInfoList[-1][0])
+ Index += 1
+
+ #
+ # Add dist
+ #
+ GlobalData.gTO_BE_INSTALLED_DIST_LIST.append(DistInfoList[-1][0])
+
+ # Check for dependency
+ Dep = DependencyRules(DataBase, DistPkgList)
+
+ for ToBeInstalledDist in DistInfoList:
+ CheckInstallDpx(Dep, ToBeInstalledDist[0], ToBeInstalledDist[2])
+
+ #
+ # Install distribution
+ #
+ InstallDp(ToBeInstalledDist[0], ToBeInstalledDist[2], ToBeInstalledDist[1],
+ Options, Dep, WorkspaceDir, DataBase)
+ ReturnCode = 0
+
+ except FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+
+ except:
+ ReturnCode = CODE_ERROR
+ Logger.Error(
+ "\nInstallPkg",
+ CODE_ERROR,
+ ST.ERR_UNKNOWN_FATAL_INSTALL_ERR % Options.PackageFile,
+ ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
+ platform) + format_exc())
+ finally:
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_STARTED)
+ for ToBeInstalledDist in DistInfoList:
+ if ToBeInstalledDist[3]:
+ ToBeInstalledDist[3].Close()
+ if ToBeInstalledDist[1]:
+ ToBeInstalledDist[1].Close()
+ for TempDir in GlobalData.gUNPACK_DIR:
+ rmtree(TempDir)
+ GlobalData.gUNPACK_DIR = []
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
+ if ReturnCode == 0:
+ Logger.Quiet(ST.MSG_FINISH)
+ return ReturnCode
+
+## BackupDist method
+#
+# This method backs up the distribution file into $(WORKSPACE)/conf/upt,
+# renaming it if a distribution with the same name already exists.
+#
+# @param DpPkgFileName: The distribution path
+# @param Guid: The distribution Guid
+# @param Version: The distribution Version
+# @param WorkspaceDir: The workspace directory
+# @retval NewDpPkgFileName: The exact backup file name
+#
+def BackupDist(DpPkgFileName, Guid, Version, WorkspaceDir):
+ DistFileName = os.path.split(DpPkgFileName)[1]
+ DestDir = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gUPT_DIR))
+ CreateDirectory(DestDir)
+ DestFile = os.path.normpath(os.path.join(DestDir, DistFileName))
+ if os.path.exists(DestFile):
+ FileName, Ext = os.path.splitext(DistFileName)
+ NewFileName = FileName + '_' + Guid + '_' + Version + Ext
+ DestFile = os.path.normpath(os.path.join(DestDir, NewFileName))
+ if os.path.exists(DestFile):
+ #
+ # ask the user to input a new file name
+ #
+ Logger.Info( ST.MSG_NEW_FILE_NAME_FOR_DIST)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ DestFile = os.path.normpath(os.path.join(DestDir, Input))
+ copyfile(DpPkgFileName, DestFile)
+ NewDpPkgFileName = DestFile[DestFile.find(DestDir) + len(DestDir) + 1:]
+ return NewDpPkgFileName
+
+## CheckInstallDpx method
+#
+# Check whether the distribution can be installed
+#
+# @param Dep: the DependencyRules instance used to check dependencies
+# @param DistPkg: the distribution object
+# @param DistPkgFileName: the distribution package file name
+#
+def CheckInstallDpx(Dep, DistPkg, DistPkgFileName):
+ #
+ # Check distribution package installed or not
+ #
+ if Dep.CheckDpExists(DistPkg.Header.GetGuid(),
+ DistPkg.Header.GetVersion()):
+ Logger.Error("InstallPkg",
+ UPT_ALREADY_INSTALLED_ERROR,
+ ST.WRN_DIST_PKG_INSTALLED % os.path.basename(DistPkgFileName))
+ #
+ # Check distribution dependency (all module dependency should be
+ # satisfied)
+ #
+ if not Dep.CheckInstallDpDepexSatisfied(DistPkg):
+ Logger.Error("InstallPkg", UNKNOWN_ERROR,
+ ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY,
+ ExtraData=DistPkg.Header.Name)
+
+## InstallModuleContent method
+#
+# If this is a standalone module, then Package should be None and
+# ModulePath should be ''
+# @param FromPath: FromPath
+# @param NewPath: NewPath
+# @param ModulePath: ModulePath
+# @param Module: Module
+# @param ContentZipFile: ContentZipFile
+# @param WorkspaceDir: WorkspaceDir
+# @param ModuleList: ModuleList
+# @param Package: Package
+# @param ReadOnly: ReadOnly
+#
+def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
+ WorkspaceDir, ModuleList, Package = None, ReadOnly = False):
+
+ if NewPath.startswith("\\") or NewPath.startswith("/"):
+ NewPath = NewPath[1:]
+
+ if not IsValidInstallPath(NewPath):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%NewPath)
+
+ NewModuleFullPath = os.path.normpath(os.path.join(WorkspaceDir, NewPath,
+ ConvertPath(ModulePath)))
+ Module.SetFullPath(os.path.normpath(os.path.join(NewModuleFullPath,
+ ConvertPath(Module.GetName()) + '.inf')))
+ Module.FileList = []
+
+ for MiscFile in Module.GetMiscFileList():
+ if not MiscFile:
+ continue
+ for Item in MiscFile.GetFileList():
+ File = Item.GetURI()
+ if File.startswith("\\") or File.startswith("/"):
+ File = File[1:]
+
+ if not IsValidInstallPath(File):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
+
+ FromFile = os.path.join(FromPath, ModulePath, File)
+ Executable = Item.GetExecutable()
+ ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
+ if Package and ((ToFile, Md5Sum) not in Package.FileList):
+ Package.FileList.append((ToFile, Md5Sum))
+ elif Package:
+ continue
+ elif (ToFile, Md5Sum) not in Module.FileList:
+ Module.FileList.append((ToFile, Md5Sum))
+ for Item in Module.GetSourceFileList():
+ File = Item.GetSourceFile()
+ if File.startswith("\\") or File.startswith("/"):
+ File = File[1:]
+
+ if not IsValidInstallPath(File):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
+
+ FromFile = os.path.join(FromPath, ModulePath, File)
+ ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
+ if Package and ((ToFile, Md5Sum) not in Package.FileList):
+ Package.FileList.append((ToFile, Md5Sum))
+ elif Package:
+ continue
+ elif (ToFile, Md5Sum) not in Module.FileList:
+ Module.FileList.append((ToFile, Md5Sum))
+ for Item in Module.GetBinaryFileList():
+ FileNameList = Item.GetFileNameList()
+ for FileName in FileNameList:
+ File = FileName.GetFilename()
+ if File.startswith("\\") or File.startswith("/"):
+ File = File[1:]
+
+ if not IsValidInstallPath(File):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
+
+ FromFile = os.path.join(FromPath, ModulePath, File)
+ ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
+ if Package and ((ToFile, Md5Sum) not in Package.FileList):
+ Package.FileList.append((ToFile, Md5Sum))
+ elif Package:
+ continue
+ elif (ToFile, Md5Sum) not in Module.FileList:
+ Module.FileList.append((ToFile, Md5Sum))
+
+ InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
+ ModuleList)
+
+## InstallModuleContentZipFile
+#
+# Extract files under the module path from the content zip file that are not
+# listed in the module description
+#
+def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
+ ModuleList):
+ #
+ # Extract other files under current module path in content Zip file but not listed in the description
+ #
+ if ContentZipFile:
+ for FileName in ContentZipFile.GetZipFile().namelist():
+ FileName = os.path.normpath(FileName)
+ CheckPath = os.path.normpath(os.path.join(FromPath, ModulePath))
+ if FileUnderPath(FileName, CheckPath):
+ if FileName.startswith("\\") or FileName.startswith("/"):
+ FileName = FileName[1:]
+
+ if not IsValidInstallPath(FileName):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
+
+ FromFile = FileName
+ ToFile = os.path.normpath(os.path.join(WorkspaceDir,
+ ConvertPath(FileName.replace(FromPath, NewPath, 1))))
+ CheckList = copy.copy(Module.FileList)
+ if Package:
+ CheckList += Package.FileList
+ for Item in CheckList:
+ if Item[0] == ToFile:
+ break
+ else:
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
+ if Package and ((ToFile, Md5Sum) not in Package.FileList):
+ Package.FileList.append((ToFile, Md5Sum))
+ elif Package:
+ continue
+ elif (ToFile, Md5Sum) not in Module.FileList:
+ Module.FileList.append((ToFile, Md5Sum))
+
+ ModuleList.append((Module, Package))
+
+## FileUnderPath
+# Check whether FileName starts with the directory specified by CheckPath
+#
+# @param FileName: the FileName need to be checked
+# @param CheckPath: the path need to be checked against
+# @return: True or False
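+#
+# For example (hypothetical paths): FileUnderPath('Pkg/Mod/A.c', 'Pkg/Mod')
+# returns True, while FileUnderPath('Pkg/ModuleX/A.c', 'Pkg/Mod') returns
+# False, because the remaining path must rejoin with CheckPath to reproduce
+# the original file name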
+#
+def FileUnderPath(FileName, CheckPath):
+ FileName = FileName.replace('\\', '/')
+ FileName = os.path.normpath(FileName)
+ CheckPath = CheckPath.replace('\\', '/')
+ CheckPath = os.path.normpath(CheckPath)
+ if FileName.startswith(CheckPath):
+ RemainingPath = os.path.normpath(FileName.replace(CheckPath, '', 1))
+ while RemainingPath.startswith('\\') or RemainingPath.startswith('/'):
+ RemainingPath = RemainingPath[1:]
+ if FileName == os.path.normpath(os.path.join(CheckPath, RemainingPath)):
+ return True
+
+ return False
+
+## InstallFile
+# Extract File from Zipfile, set file attribute, and return the Md5Sum
+#
+# @return: Md5Sum - the MD5 sum of the installed file
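+#
+# If ToFile already exists it is left in place and only its checksum is
+# computed; otherwise the file is first unpacked from ContentZipFile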
+#
+def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
+ if os.path.exists(os.path.normpath(ToFile)):
+ pass
+ else:
+ if not ContentZipFile or not ContentZipFile.UnpackFile(FromFile, ToFile):
+ Logger.Error("UPT", FILE_NOT_FOUND, ST.ERR_INSTALL_FILE_FROM_EMPTY_CONTENT % FromFile)
+
+ if ReadOnly:
+ if not Executable:
+ chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
+ else:
+ chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
+ elif Executable:
+ chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP |
+ stat.S_IWOTH | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
+ else:
+ chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
+
+ Md5Signature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
+
+ return Md5Sum
+
+## InstallPackageContent method
+#
+# @param FromPath: FromPath
+# @param ToPath: ToPath
+# @param Package: Package
+# @param ContentZipFile: ContentZipFile
+# @param Dep: Dep
+# @param WorkspaceDir: WorkspaceDir
+# @param ModuleList: ModuleList
+#
+def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
+ WorkspaceDir, ModuleList, ReadOnly = False):
+ if Dep:
+ pass
+ Package.FileList = []
+
+ if ToPath.startswith("\\") or ToPath.startswith("/"):
+ ToPath = ToPath[1:]
+
+ if not IsValidInstallPath(ToPath):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%ToPath)
+
+ if FromPath.startswith("\\") or FromPath.startswith("/"):
+ FromPath = FromPath[1:]
+
+ if not IsValidInstallPath(FromPath):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FromPath)
+
+ PackageFullPath = os.path.normpath(os.path.join(WorkspaceDir, ToPath))
+ for MiscFile in Package.GetMiscFileList():
+ for Item in MiscFile.GetFileList():
+ FileName = Item.GetURI()
+ if FileName.startswith("\\") or FileName.startswith("/"):
+ FileName = FileName[1:]
+
+ if not IsValidInstallPath(FileName):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
+
+ FromFile = os.path.join(FromPath, FileName)
+ Executable = Item.GetExecutable()
+ ToFile = (os.path.join(PackageFullPath, ConvertPath(FileName)))
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
+ if (ToFile, Md5Sum) not in Package.FileList:
+ Package.FileList.append((ToFile, Md5Sum))
+ PackageIncludeArchList = []
+ for Item in Package.GetPackageIncludeFileList():
+ FileName = Item.GetFilePath()
+ if FileName.startswith("\\") or FileName.startswith("/"):
+ FileName = FileName[1:]
+
+ if not IsValidInstallPath(FileName):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
+
+ FromFile = os.path.join(FromPath, FileName)
+ ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
+ RetFile = ContentZipFile.UnpackFile(FromFile, ToFile)
+ if RetFile == '':
+ #
+ # a non-existent path in the zip file returns '', which in our case means
+ # an include directory: save the information for later DEC creation and
+ # also create the directory
+ #
+ PackageIncludeArchList.append([Item.GetFilePath(), Item.GetSupArchList()])
+ CreateDirectory(ToFile)
+ continue
+ if ReadOnly:
+ chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
+ else:
+ chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
+ Md5Signature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
+ Md5Sum = Md5Signature.hexdigest()
+ if (ToFile, Md5Sum) not in Package.FileList:
+ Package.FileList.append((ToFile, Md5Sum))
+ Package.SetIncludeArchList(PackageIncludeArchList)
+
+ for Item in Package.GetStandardIncludeFileList():
+ FileName = Item.GetFilePath()
+ if FileName.startswith("\\") or FileName.startswith("/"):
+ FileName = FileName[1:]
+
+ if not IsValidInstallPath(FileName):
+ Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
+
+ FromFile = os.path.join(FromPath, FileName)
+ ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
+ Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
+ if (ToFile, Md5Sum) not in Package.FileList:
+ Package.FileList.append((ToFile, Md5Sum))
+
+ #
+ # Update package
+ #
+ Package.SetPackagePath(Package.GetPackagePath().replace(FromPath,
+ ToPath, 1))
+ Package.SetFullPath(os.path.normpath(os.path.join(PackageFullPath,
+ ConvertPath(Package.GetName()) + '.dec')))
+
+ #
+ # Install files in module
+ #
+ Module = None
+ ModuleDict = Package.GetModuleDict()
+ for ModuleGuid, ModuleVersion, ModuleName, ModulePath in ModuleDict:
+ Module = ModuleDict[ModuleGuid, ModuleVersion, ModuleName, ModulePath]
+ InstallModuleContent(FromPath, ToPath, ModulePath, Module,
+ ContentZipFile, WorkspaceDir, ModuleList, Package, ReadOnly)
+
+## GetDPFile method
+#
+# @param ZipFile: A ZipFile
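+#
+# A distribution zip must carry exactly one '.pkg' description file and one
+# '.content' payload file; a second file of either type is reported as an
+# error, and files with other extensions are ignored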
+#
+def GetDPFile(ZipFile):
+ ContentFile = ''
+ DescFile = ''
+ for FileName in ZipFile.namelist():
+ if FileName.endswith('.content'):
+ if not ContentFile:
+ ContentFile = FileName
+ continue
+ elif FileName.endswith('.pkg'):
+ if not DescFile:
+ DescFile = FileName
+ continue
+ else:
+ continue
+
+ Logger.Error("PackagingTool", FILE_TYPE_MISMATCH,
+ ExtraData=ST.ERR_DIST_FILE_TOOMANY)
+ if not DescFile or not ContentFile:
+ Logger.Error("PackagingTool", FILE_UNKNOWN_ERROR,
+ ExtraData=ST.ERR_DIST_FILE_TOOFEW)
+ return DescFile, ContentFile
+
+## InstallDp method
+#
+# Install the distribution to the current workspace
+#
+def InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir, DataBase):
+ #
+ # PackageList, ModuleList record the information for the meta-data
+ # files that need to be generated later
+ #
+ PackageList = []
+ ModuleList = []
+ DistPkg.PackageSurfaceArea = GetPackageList(DistPkg, Dep, WorkspaceDir, Options,
+ ContentZipFile, ModuleList, PackageList)
+
+ DistPkg.ModuleSurfaceArea = GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList)
+
+ GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile)
+
+ #
+ # copy "Distribution File" to directory $(WORKSPACE)/conf/upt
+ #
+ DistFileName = os.path.split(DpPkgFileName)[1]
+ NewDpPkgFileName = BackupDist(DpPkgFileName, DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion(), WorkspaceDir)
+
+ #
+ # update database
+ #
+ Logger.Quiet(ST.MSG_UPDATE_PACKAGE_DATABASE)
+ DataBase.AddDPObject(DistPkg, NewDpPkgFileName, DistFileName,
+ DistPkg.Header.RePackage)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InventoryWs.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InventoryWs.py
new file mode 100755
index 00000000..262c6af5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/InventoryWs.py
@@ -0,0 +1,111 @@
+## @file
+# Inventory workspace's distribution package information.
+#
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+"""
+Inventory workspace's distribution package information.
+"""
+##
+# Import Modules
+#
+from sys import platform
+from traceback import format_exc
+from platform import python_version
+
+from Logger import StringTable as ST
+from Logger.ToolError import FatalError
+from Logger.ToolError import ABORT_ERROR
+from Logger.ToolError import CODE_ERROR
+import Logger.Log as Logger
+
+from Library import GlobalData
+
+## InventoryDistInstalled
+#
+# This method retrieves the installed distribution information from the internal UPT database
+#
+# @param DataBase: the UPT database
+#
+def InventoryDistInstalled(DataBase):
+ DistInstalled = DataBase.InventoryDistInstalled()
+
+ #
+ # find the max length for each item
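+ # (column widths start at the header labels' lengths and grow to the
+ # longest value so the table printed below stays aligned)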
+ #
+ DpNameStr = "DpName"
+ DpGuidStr = "DpGuid"
+ DpVerStr = "DpVer"
+ DpOriginalNameStr = "DpOriginalName"
+ MaxGuidlen = len(DpGuidStr)
+ MaxVerlen = len(DpVerStr)
+ MaxDpAliasFileNameLen = len(DpNameStr)
+ MaxDpOrigFileNamelen = len(DpOriginalNameStr)
+
+ for (DpGuid, DpVersion, DpOriginalName, DpAliasFileName) in DistInstalled:
+ MaxGuidlen = max(MaxGuidlen, len(DpGuid))
+ MaxVerlen = max(MaxVerlen, len(DpVersion))
+ MaxDpAliasFileNameLen = max(MaxDpAliasFileNameLen, len(DpAliasFileName))
+ MaxDpOrigFileNamelen = max(MaxDpOrigFileNamelen, len(DpOriginalName))
+
+ OutMsgFmt = "%-*s\t%-*s\t%-*s\t%-s"
+ OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
+ DpNameStr,
+ MaxGuidlen,
+ DpGuidStr,
+ MaxVerlen,
+ DpVerStr,
+ DpOriginalNameStr)
+ Logger.Info(OutMsg)
+
+ for (DpGuid, DpVersion, DpFileName, DpAliasFileName) in DistInstalled:
+ OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
+ DpAliasFileName,
+ MaxGuidlen,
+ DpGuid,
+ MaxVerlen,
+ DpVersion,
+ DpFileName)
+ Logger.Info(OutMsg)
+
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods according to the command
+# line options. If no error is found, a zero value is returned so the caller
+# of this tool can know whether it executed successfully or not.
+#
+# @param Options: command Options
+#
+def Main(Options = None):
+ if Options:
+ pass
+
+ try:
+ DataBase = GlobalData.gDB
+ InventoryDistInstalled(DataBase)
+ ReturnCode = 0
+ except FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+ except:
+ ReturnCode = CODE_ERROR
+ Logger.Error("\nInventoryWs",
+ CODE_ERROR,
+ ST.ERR_UNKNOWN_FATAL_INVENTORYWS_ERR,
+ ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
+ platform) + format_exc())
+
+ if ReturnCode == 0:
+ Logger.Quiet(ST.MSG_FINISH)
+
+ return ReturnCode
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentGenerating.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentGenerating.py
new file mode 100755
index 00000000..0994cb69
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentGenerating.py
@@ -0,0 +1,238 @@
+## @file
+# This file is used to define the comment generating interface
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+CommentGenerating
+'''
+
+##
+# Import Modules
+#
+from Library.StringUtils import GetSplitValueList
+from Library.DataType import TAB_SPACE_SPLIT
+from Library.DataType import TAB_INF_GUIDTYPE_VAR
+from Library.DataType import USAGE_ITEM_NOTIFY
+from Library.DataType import ITEM_UNDEFINED
+from Library.DataType import TAB_HEADER_COMMENT
+from Library.DataType import TAB_BINARY_HEADER_COMMENT
+from Library.DataType import TAB_COMMENT_SPLIT
+from Library.DataType import TAB_SPECIAL_COMMENT
+from Library.DataType import END_OF_LINE
+from Library.DataType import TAB_COMMENT_EDK1_SPLIT
+from Library.DataType import TAB_COMMENT_EDK1_START
+from Library.DataType import TAB_COMMENT_EDK1_END
+from Library.DataType import TAB_STAR
+from Library.DataType import TAB_PCD_PROMPT
+from Library.UniClassObject import ConvertSpecialUnicodes
+from Library.Misc import GetLocalValue
+## GenTailCommentLines
+#
+# @param TailCommentLines: the tail comment lines that need to be generated
+# @param LeadingSpaceNum: the number of leading spaces needed for
+#                         non-first-line tail comments
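+#
+# For example (hypothetical input): 'CONSUMES' becomes '  ## CONSUMES', and
+# each subsequent line is indented by LeadingSpaceNum spaces before its
+# '  ## ' prefix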
+#
+def GenTailCommentLines (TailCommentLines, LeadingSpaceNum = 0):
+ TailCommentLines = TailCommentLines.rstrip(END_OF_LINE)
+ CommentStr = TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + \
+ (END_OF_LINE + LeadingSpaceNum * TAB_SPACE_SPLIT + TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + \
+ TAB_SPACE_SPLIT).join(GetSplitValueList(TailCommentLines, END_OF_LINE))
+
+ return CommentStr
+
+## GenGenericComment
+#
+# @param CommentLines: Generic comment text, possibly multiple lines
+#
+def GenGenericComment (CommentLines):
+ if not CommentLines:
+ return ''
+ CommentLines = CommentLines.rstrip(END_OF_LINE)
+ CommentStr = TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + (END_OF_LINE + TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT).join\
+ (GetSplitValueList(CommentLines, END_OF_LINE)) + END_OF_LINE
+ return CommentStr
+
+## GenGenericCommentF
+#
+# Similar to GenGenericComment, but removes one trailing <EOL> from the
+# comment, and for a line containing only <EOL>, '#\n' is generated instead
+# of '# \n'
+#
+# @param CommentLines: Generic comment text, possibly multiple lines
+# @return CommentStr: Generated comment line
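+#
+# For example (hypothetical input): 'line1\nline2\n' yields
+# '# line1\n# line2\n', and an empty middle line yields a bare '#\n'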
+#
+def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibraryClass=False):
+ if not CommentLines:
+ return ''
+ #
+ # if the comment ends with '\n', remove it to prevent an extra line from
+ # being generated later on
+ #
+ if CommentLines.endswith(END_OF_LINE):
+ CommentLines = CommentLines[:-1]
+ CommentStr = ''
+ if IsPrompt:
+ CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + TAB_PCD_PROMPT + TAB_SPACE_SPLIT + \
+ CommentLines.replace(END_OF_LINE, '') + END_OF_LINE
+ else:
+ CommentLineList = GetSplitValueList(CommentLines, END_OF_LINE)
+ FindLibraryClass = False
+ for Line in CommentLineList:
+ # If this comment is for @libraryclass and it has multiple lines, make
+ # sure the subsequent lines align with the text after @libraryclass, as below
+ #
+ # ## @libraryclass XYZ FIRST_LINE
+ # ## ABC SECOND_LINE
+ #
+ if IsInfLibraryClass and Line.find(u'@libraryclass ') > -1:
+ FindLibraryClass = True
+ if Line == '':
+ CommentStr += TAB_COMMENT_SPLIT * NumOfPound + END_OF_LINE
+ else:
+ if FindLibraryClass and Line.find(u'@libraryclass ') > -1:
+ CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + Line + END_OF_LINE
+ elif FindLibraryClass:
+ CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT * 16 + Line + END_OF_LINE
+ else:
+ CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + Line + END_OF_LINE
+
+ return CommentStr
+
+
+## GenHeaderCommentSection
+#
+# Generate Header comment sections
+#
+# @param Abstract One line of abstract
+# @param Description multiple lines of Description
+# @param Copyright possible multiple copyright lines
+# @param License possible multiple license lines
+#
+def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryHeader=False, \
+ CommChar=TAB_COMMENT_SPLIT):
+ Content = ''
+
+ #
+ # Convert special characters to (c), (r) and (tm).
+ #
+ Abstract = ConvertSpecialUnicodes(Abstract)
+ Description = ConvertSpecialUnicodes(Description)
+ if IsBinaryHeader:
+ Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_BINARY_HEADER_COMMENT + '\r\n'
+ elif CommChar == TAB_COMMENT_EDK1_SPLIT:
+ Content += CommChar + TAB_SPACE_SPLIT + TAB_COMMENT_EDK1_START + TAB_STAR + TAB_SPACE_SPLIT +\
+ TAB_HEADER_COMMENT + '\r\n'
+ else:
+ Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_HEADER_COMMENT + '\r\n'
+ if Abstract:
+ Abstract = Abstract.rstrip('\r\n')
+ Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
+ (Abstract, '\n'))
+ Content += '\r\n' + CommChar + '\r\n'
+ else:
+ Content += CommChar + '\r\n'
+
+ if Description:
+ Description = Description.rstrip('\r\n')
+ Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
+ (Description, '\n'))
+ Content += '\r\n' + CommChar + '\r\n'
+
+ #
+ # There is no '#\n' line to separate multiple copyright lines in the code base
+ #
+ if Copyright:
+ Copyright = Copyright.rstrip('\r\n')
+ Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join\
+ (GetSplitValueList(Copyright, '\n'))
+ Content += '\r\n' + CommChar + '\r\n'
+
+ if License:
+ License = License.rstrip('\r\n')
+ Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
+ (License, '\n'))
+ Content += '\r\n' + CommChar + '\r\n'
+
+ if CommChar == TAB_COMMENT_EDK1_SPLIT:
+ Content += CommChar + TAB_SPACE_SPLIT + TAB_STAR + TAB_COMMENT_EDK1_END + '\r\n'
+ else:
+ Content += CommChar * 2 + '\r\n'
+
+ return Content
+
+
+## GenInfPcdTailComment
+# Generate a Pcd tail comment for INF; this is a one-line comment
+#
+# @param Usage: Usage type
+# @param TailCommentText: Comment text for tail comment
+#
+def GenInfPcdTailComment (Usage, TailCommentText):
+ if (Usage == ITEM_UNDEFINED) and (not TailCommentText):
+ return ''
+
+ CommentLine = TAB_SPACE_SPLIT.join([Usage, TailCommentText])
+ return GenTailCommentLines(CommentLine)
+
+## GenInfProtocolPPITailComment
+# Generate Protocol/PPI tail comment for Inf
+#
+# @param Usage: Usage type
+# @param TailCommentText: Comment text for tail comment
+#
+def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
+ if (not Notify) and (Usage == ITEM_UNDEFINED) and (not TailCommentText):
+ return ''
+
+ if Notify:
+ CommentLine = USAGE_ITEM_NOTIFY + " ## "
+ else:
+ CommentLine = ''
+
+ CommentLine += TAB_SPACE_SPLIT.join([Usage, TailCommentText])
+ return GenTailCommentLines(CommentLine)
+
+## GenInfGuidTailComment
+# Generate Guid tail comment for Inf
+#
+# @param Usage: Usage type
+# @param TailCommentText: Comment text for tail comment
+#
+def GenInfGuidTailComment (Usage, GuidTypeList, VariableName, TailCommentText):
+ GuidType = GuidTypeList[0]
+ if (Usage == ITEM_UNDEFINED) and (GuidType == ITEM_UNDEFINED) and \
+ (not TailCommentText):
+ return ''
+
+ FirstLine = Usage + " ## " + GuidType
+ if GuidType == TAB_INF_GUIDTYPE_VAR:
+ FirstLine += ":" + VariableName
+
+ CommentLine = TAB_SPACE_SPLIT.join([FirstLine, TailCommentText])
+ return GenTailCommentLines(CommentLine)
+
+## GenDecTailComment
+#
+# @param SupModuleList: Supported module type list
+#
+def GenDecTailComment (SupModuleList):
+ CommentLine = TAB_SPACE_SPLIT.join(SupModuleList)
+ return GenTailCommentLines(CommentLine)
+
+
+## _GetHelpStr
+# Get the help string from a list of HelpTextObject; the language priority
+# follows the related HLD
+#
+# @param HelpTextObjList: List of HelpTextObject
+#
+# @return HelpStr: the help text string found, '' means no help text found
+#
+def _GetHelpStr(HelpTextObjList):
+ ValueList = []
+ for HelpObj in HelpTextObjList:
+ ValueList.append((HelpObj.GetLang(), HelpObj.GetString()))
+ return GetLocalValue(ValueList, True)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentParsing.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentParsing.py
new file mode 100755
index 00000000..3f450358
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/CommentParsing.py
@@ -0,0 +1,593 @@
+## @file
+# This file is used to define comment parsing interface
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+CommentParsing
+'''
+
+##
+# Import Modules
+#
+import re
+
+from Library.StringUtils import GetSplitValueList
+from Library.StringUtils import CleanString2
+from Library.DataType import HEADER_COMMENT_NOT_STARTED
+from Library.DataType import TAB_COMMENT_SPLIT
+from Library.DataType import HEADER_COMMENT_LICENSE
+from Library.DataType import HEADER_COMMENT_ABSTRACT
+from Library.DataType import HEADER_COMMENT_COPYRIGHT
+from Library.DataType import HEADER_COMMENT_DESCRIPTION
+from Library.DataType import TAB_SPACE_SPLIT
+from Library.DataType import TAB_COMMA_SPLIT
+from Library.DataType import SUP_MODULE_LIST
+from Library.DataType import TAB_VALUE_SPLIT
+from Library.DataType import TAB_PCD_VALIDRANGE
+from Library.DataType import TAB_PCD_VALIDLIST
+from Library.DataType import TAB_PCD_EXPRESSION
+from Library.DataType import TAB_PCD_PROMPT
+from Library.DataType import TAB_CAPHEX_START
+from Library.DataType import TAB_HEX_START
+from Library.DataType import PCD_ERR_CODE_MAX_SIZE
+from Library.ExpressionValidate import IsValidRangeExpr
+from Library.ExpressionValidate import IsValidListExpr
+from Library.ExpressionValidate import IsValidLogicalExpr
+from Object.POM.CommonObject import TextObject
+from Object.POM.CommonObject import PcdErrorObject
+import Logger.Log as Logger
+from Logger.ToolError import FORMAT_INVALID
+from Logger.ToolError import FORMAT_NOT_SUPPORTED
+from Logger import StringTable as ST
+
+## ParseHeaderCommentSection
+#
+# Parse header comment section lines and extract the Abstract, Description,
+# Copyright and License lines
+#
+# @param CommentList: List of (Comment, LineNumber)
+# @param FileName: FileName of the comment
+# @param IsBinaryHeader: True when parsing a binary header instead of a file header
+#
+def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = False):
+ Abstract = ''
+ Description = ''
+ Copyright = ''
+ License = ''
+ EndOfLine = "\n"
+ if IsBinaryHeader:
+ STR_HEADER_COMMENT_START = "@BinaryHeader"
+ else:
+ STR_HEADER_COMMENT_START = "@file"
+ HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
+
+ #
+ # first find the last copyright line
+ #
+ Last = 0
+ for Index in range(len(CommentList)-1, 0, -1):
+ Line = CommentList[Index][0]
+ if _IsCopyrightLine(Line):
+ Last = Index
+ break
+
+ for Item in CommentList:
+ Line = Item[0]
+ LineNo = Item[1]
+
+ if not Line.startswith(TAB_COMMENT_SPLIT) and Line:
+ Logger.Error("\nUPT", FORMAT_INVALID, ST.ERR_INVALID_COMMENT_FORMAT, FileName, Item[1])
+ Comment = CleanString2(Line)[1]
+ Comment = Comment.strip()
+ #
+ # if there are blank lines within the License or Description, keep them as they
+ # indicate a different block; a blank line in the position where the Abstract
+ # should be is also kept, as it indicates that there is no abstract
+ #
+ if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
+ HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
+ continue
+
+ if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
+ if Comment.startswith(STR_HEADER_COMMENT_START):
+ HeaderCommentStage = HEADER_COMMENT_ABSTRACT
+ else:
+ License += Comment + EndOfLine
+ else:
+ if HeaderCommentStage == HEADER_COMMENT_ABSTRACT:
+ #
+ # in case there is no abstract or description
+ #
+ if not Comment:
+ HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
+ elif _IsCopyrightLine(Comment):
+ Result, ErrMsg = _ValidateCopyright(Comment)
+ ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
+ Copyright += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
+ else:
+ Abstract += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
+ elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
+ #
+ # in case there is no description
+ #
+ if _IsCopyrightLine(Comment):
+ Result, ErrMsg = _ValidateCopyright(Comment)
+ ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
+ Copyright += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
+ else:
+ Description += Comment + EndOfLine
+ elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
+ if _IsCopyrightLine(Comment):
+ Result, ErrMsg = _ValidateCopyright(Comment)
+ ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
+ Copyright += Comment + EndOfLine
+ else:
+ #
+ # Content after the copyright lines is license; non-copyright lines that
+ # appear between copyright lines are discarded
+ #
+ if LineNo > Last:
+ if License:
+ License += EndOfLine
+ License += Comment + EndOfLine
+ HeaderCommentStage = HEADER_COMMENT_LICENSE
+ else:
+ if not Comment and not License:
+ continue
+ License += Comment + EndOfLine
+
+ return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
+
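+# Illustrative sketch (hypothetical input, not part of the upstream file):
+#
+#   CommentList = [('## @file', 1),
+#                  ('# Sample abstract', 2),
+#                  ('#', 3),
+#                  ('# Sample description', 4),
+#                  ('#', 5),
+#                  ('# Copyright (c) 2018, Example Corp.', 6),
+#                  ('#', 7),
+#                  ('# Example license text', 8)]
+#   Abstract, Description, Copyright, License = \
+#   ParseHeaderCommentSection(CommentList, 'Sample.inf')
+#   # roughly yields ('Sample abstract', 'Sample description',
+#   #                 'Copyright (c) 2018, Example Corp.', 'Example license text')
+#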
+## _IsCopyrightLine
+# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
+# followed by zero or more white space characters followed by a "(" character
+#
+# @param LineContent: the line need to be checked
+# @return: True if current line is copyright line, False else
+#
+def _IsCopyrightLine (LineContent):
+ LineContent = LineContent.upper()
+ Result = False
+
+ ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
+ if ReIsCopyrightRe.search(LineContent):
+ Result = True
+
+ return Result
+
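+# e.g. _IsCopyrightLine('Copyright (c) 2018, Example Corp.') returns True,
+# while _IsCopyrightLine('All rights reserved.') returns False
+# (hypothetical inputs).
+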
+## ParseGenericComment
+#
+# @param GenericComment: Generic comment list, element of
+# (CommentLine, LineNum)
+# @param ContainerFile: Input value for filename of Dec file
+# @param SkipTag: Tag prefix stripped from comment lines that start with it
+#
+def ParseGenericComment (GenericComment, ContainerFile=None, SkipTag=None):
+ if ContainerFile:
+ pass
+ HelpTxt = None
+ HelpStr = ''
+
+ for Item in GenericComment:
+ CommentLine = Item[0]
+ Comment = CleanString2(CommentLine)[1]
+ if SkipTag is not None and Comment.startswith(SkipTag):
+ Comment = Comment.replace(SkipTag, '', 1)
+ HelpStr += Comment + '\n'
+
+ if HelpStr:
+ HelpTxt = TextObject()
+ if HelpStr.endswith('\n') and not HelpStr.endswith('\n\n') and HelpStr != '\n':
+ HelpStr = HelpStr[:-1]
+ HelpTxt.SetString(HelpStr)
+
+ return HelpTxt
+
+## ParsePcdErrorCode
+#
+# @param Value: original ErrorCode value
+# @param ContainerFile: Input value for filename of Dec file
+# @param LineNum: Line Num
+#
+def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
+ try:
+ if Value.strip().startswith((TAB_HEX_START, TAB_CAPHEX_START)):
+ Base = 16
+ else:
+ Base = 10
+ ErrorCode = int(Value, Base)
+ if ErrorCode > PCD_ERR_CODE_MAX_SIZE or ErrorCode < 0:
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ "The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
+ File = ContainerFile,
+ Line = LineNum)
+ ErrorCode = '0x%x' % ErrorCode
+ return ErrorCode
+ except ValueError as XStr:
+ if XStr:
+ pass
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ "The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
+ File = ContainerFile,
+ Line = LineNum)
+
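+# Illustrative sketch (hypothetical values): ParsePcdErrorCode('0x0B') returns
+# '0xb', while a value outside 0..PCD_ERR_CODE_MAX_SIZE is reported as an error.
+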
+## ParseDecPcdGenericComment
+#
+# @param GenericComment: Generic comment list, element of (CommentLine,
+# LineNum)
+# @param ContainerFile: Input value for filename of Dec file
+# @param TokenSpaceGuidCName: Token space Guid C name of the PCD
+# @param CName: C name of the PCD
+# @param MacroReplaceDict: Dictionary of macro values used to expand $(MACRO) references
+#
+def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCName, CName, MacroReplaceDict):
+ HelpStr = ''
+ PromptStr = ''
+ PcdErr = None
+ PcdErrList = []
+ ValidValueNum = 0
+ ValidRangeNum = 0
+ ExpressionNum = 0
+
+ for (CommentLine, LineNum) in GenericComment:
+ Comment = CleanString2(CommentLine)[1]
+ #
+ # To replace Macro
+ #
+ MACRO_PATTERN = r'[\t\s]*\$\([A-Z][_A-Z0-9]*\)'
+ MatchedStrs = re.findall(MACRO_PATTERN, Comment)
+ for MatchedStr in MatchedStrs:
+ if MatchedStr:
+ Macro = MatchedStr.strip().lstrip('$(').rstrip(')').strip()
+ if Macro in MacroReplaceDict:
+ Comment = Comment.replace(MatchedStr, MacroReplaceDict[Macro])
+ if Comment.startswith(TAB_PCD_VALIDRANGE):
+ if ValidValueNum > 0 or ExpressionNum > 0:
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ ST.WRN_MULTI_PCD_RANGES,
+ File = ContainerFile,
+ Line = LineNum)
+ else:
+ PcdErr = PcdErrorObject()
+ PcdErr.SetTokenSpaceGuidCName(TokenSpaceGuidCName)
+ PcdErr.SetCName(CName)
+ PcdErr.SetFileLine(Comment)
+ PcdErr.SetLineNum(LineNum)
+ ValidRangeNum += 1
+ ValidRange = Comment.replace(TAB_PCD_VALIDRANGE, "", 1).strip()
+ Valid, Cause = _CheckRangeExpression(ValidRange)
+ if Valid:
+ ValueList = ValidRange.split(TAB_VALUE_SPLIT)
+ if len(ValueList) > 1:
+ PcdErr.SetValidValueRange((TAB_VALUE_SPLIT.join(ValueList[1:])).strip())
+ PcdErr.SetErrorNumber(ParsePcdErrorCode(ValueList[0], ContainerFile, LineNum))
+ else:
+ PcdErr.SetValidValueRange(ValidRange)
+ PcdErrList.append(PcdErr)
+ else:
+ Logger.Error("Parser",
+ FORMAT_NOT_SUPPORTED,
+ Cause,
+ ContainerFile,
+ LineNum)
+ elif Comment.startswith(TAB_PCD_VALIDLIST):
+ if ValidRangeNum > 0 or ExpressionNum > 0:
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ ST.WRN_MULTI_PCD_RANGES,
+ File = ContainerFile,
+ Line = LineNum)
+ elif ValidValueNum > 0:
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ ST.WRN_MULTI_PCD_VALIDVALUE,
+ File = ContainerFile,
+ Line = LineNum)
+ else:
+ PcdErr = PcdErrorObject()
+ PcdErr.SetTokenSpaceGuidCName(TokenSpaceGuidCName)
+ PcdErr.SetCName(CName)
+ PcdErr.SetFileLine(Comment)
+ PcdErr.SetLineNum(LineNum)
+ ValidValueNum += 1
+ ValidValueExpr = Comment.replace(TAB_PCD_VALIDLIST, "", 1).strip()
+ Valid, Cause = _CheckListExpression(ValidValueExpr)
+ if Valid:
+ ValidValue = Comment.replace(TAB_PCD_VALIDLIST, "", 1).replace(TAB_COMMA_SPLIT, TAB_SPACE_SPLIT)
+ ValueList = ValidValue.split(TAB_VALUE_SPLIT)
+ if len(ValueList) > 1:
+ PcdErr.SetValidValue((TAB_VALUE_SPLIT.join(ValueList[1:])).strip())
+ PcdErr.SetErrorNumber(ParsePcdErrorCode(ValueList[0], ContainerFile, LineNum))
+ else:
+ PcdErr.SetValidValue(ValidValue)
+ PcdErrList.append(PcdErr)
+ else:
+ Logger.Error("Parser",
+ FORMAT_NOT_SUPPORTED,
+ Cause,
+ ContainerFile,
+ LineNum)
+ elif Comment.startswith(TAB_PCD_EXPRESSION):
+ if ValidRangeNum > 0 or ValidValueNum > 0:
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ ST.WRN_MULTI_PCD_RANGES,
+ File = ContainerFile,
+ Line = LineNum)
+ else:
+ PcdErr = PcdErrorObject()
+ PcdErr.SetTokenSpaceGuidCName(TokenSpaceGuidCName)
+ PcdErr.SetCName(CName)
+ PcdErr.SetFileLine(Comment)
+ PcdErr.SetLineNum(LineNum)
+ ExpressionNum += 1
+ Expression = Comment.replace(TAB_PCD_EXPRESSION, "", 1).strip()
+ Valid, Cause = _CheckExpression(Expression)
+ if Valid:
+ ValueList = Expression.split(TAB_VALUE_SPLIT)
+ if len(ValueList) > 1:
+ PcdErr.SetExpression((TAB_VALUE_SPLIT.join(ValueList[1:])).strip())
+ PcdErr.SetErrorNumber(ParsePcdErrorCode(ValueList[0], ContainerFile, LineNum))
+ else:
+ PcdErr.SetExpression(Expression)
+ PcdErrList.append(PcdErr)
+ else:
+ Logger.Error("Parser",
+ FORMAT_NOT_SUPPORTED,
+ Cause,
+ ContainerFile,
+ LineNum)
+ elif Comment.startswith(TAB_PCD_PROMPT):
+ if PromptStr:
+ Logger.Error('Parser',
+ FORMAT_NOT_SUPPORTED,
+ ST.WRN_MULTI_PCD_PROMPT,
+ File = ContainerFile,
+ Line = LineNum)
+ PromptStr = Comment.replace(TAB_PCD_PROMPT, "", 1).strip()
+ else:
+ if Comment:
+ HelpStr += Comment + '\n'
+
+ #
+ # remove the last EOL if the comment is of format 'FOO\n'
+ #
+ if HelpStr.endswith('\n'):
+ if HelpStr != '\n' and not HelpStr.endswith('\n\n'):
+ HelpStr = HelpStr[:-1]
+
+ return HelpStr, PcdErrList, PromptStr
+
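+# Illustrative sketch (hypothetical names, not part of the upstream file):
+#
+#   Comments = [('## @ValidRange 0x80000001 | 0 - 10', 5),
+#               ('## Help text for the PCD', 6)]
+#   HelpStr, PcdErrList, PromptStr = ParseDecPcdGenericComment(
+#       Comments, 'Sample.dec', 'gExampleTokenSpaceGuid', 'PcdExample', {})
+#   # roughly yields HelpStr == 'Help text for the PCD', one PcdErrorObject in
+#   # PcdErrList with valid value range '0 - 10' and error number '0x80000001',
+#   # and an empty PromptStr
+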
+## ParseDecPcdTailComment
+#
+# @param TailCommentList: Tail comment list of Pcd, item of format (Comment, LineNum)
+# @param ContainerFile: Input value for filename of Dec file
+# @retval SupModuleList: The supported module type list detected
+# @retval HelpStr: The generic help text string detected
+#
+def ParseDecPcdTailComment (TailCommentList, ContainerFile):
+ assert(len(TailCommentList) == 1)
+ TailComment = TailCommentList[0][0]
+ LineNum = TailCommentList[0][1]
+
+ Comment = TailComment.lstrip(" #")
+
+ ReFindFirstWordRe = re.compile(r"""^([^ #]*)""", re.DOTALL)
+
+ #
+ # get first word and compare with SUP_MODULE_LIST
+ #
+ MatchObject = ReFindFirstWordRe.match(Comment)
+ if not (MatchObject and MatchObject.group(1) in SUP_MODULE_LIST):
+ return None, Comment
+
+ #
+ # parse line, it must have supported module type specified
+ #
+ if Comment.find(TAB_COMMENT_SPLIT) == -1:
+ Comment += TAB_COMMENT_SPLIT
+ SupMode, HelpStr = GetSplitValueList(Comment, TAB_COMMENT_SPLIT, 1)
+ SupModuleList = []
+ for Mod in GetSplitValueList(SupMode, TAB_SPACE_SPLIT):
+ if not Mod:
+ continue
+ elif Mod not in SUP_MODULE_LIST:
+ Logger.Error("UPT",
+ FORMAT_INVALID,
+ ST.WRN_INVALID_MODULE_TYPE%Mod,
+ ContainerFile,
+ LineNum)
+ else:
+ SupModuleList.append(Mod)
+
+ return SupModuleList, HelpStr
+
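+# Illustrative sketch (hypothetical input):
+#
+#   ParseDecPcdTailComment([('## UEFI_DRIVER # help text', 10)], 'Sample.dec')
+#   # roughly yields (['UEFI_DRIVER'], 'help text')
+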
+## _CheckListExpression
+#
+# @param Expression: Pcd value list expression
+#
+def _CheckListExpression(Expression):
+ ListExpr = ''
+ if TAB_VALUE_SPLIT in Expression:
+ ListExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
+ else:
+ ListExpr = Expression
+
+ return IsValidListExpr(ListExpr)
+
+## _CheckExpression
+#
+# @param Expression: Pcd value expression
+#
+def _CheckExpression(Expression):
+ Expr = ''
+ if TAB_VALUE_SPLIT in Expression:
+ Expr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
+ else:
+ Expr = Expression
+ return IsValidLogicalExpr(Expr, True)
+
+## _CheckRangeExpression
+#
+# @param Expression: Pcd range expression
+#
+def _CheckRangeExpression(Expression):
+ RangeExpr = ''
+ if TAB_VALUE_SPLIT in Expression:
+ RangeExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
+ else:
+ RangeExpr = Expression
+
+ return IsValidRangeExpr(RangeExpr)
+
+## ValidateCopyright
+#
+# Log a warning when a copyright line fails validation
+#
+def ValidateCopyright(Result, ErrType, FileName, LineNo, ErrMsg):
+ if not Result:
+ Logger.Warn("\nUPT", ErrType, FileName, LineNo, ErrMsg)
+
+## _ValidateCopyright
+#
+# @param Line: Line that contains copyright information, # stripped
+#
+# @retval Result: True if the line conforms to the Spec format, False otherwise
+# @retval ErrMsg: the detailed error description
+#
+def _ValidateCopyright(Line):
+ if Line:
+ pass
+ Result = True
+ ErrMsg = ''
+
+ return Result, ErrMsg
+
+def GenerateTokenList (Comment):
+ #
+ # Tokenize Comment using '#' and ' ' as token separators
+ #
+ ReplacedComment = None
+ while Comment != ReplacedComment:
+ ReplacedComment = Comment
+ Comment = Comment.replace('##', '#').replace('  ', ' ').replace(' ', '#').strip('# ')
+ return Comment.split('#')
+
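+# e.g. GenerateTokenList('## CONSUMES # some help') roughly yields
+# ['CONSUMES', 'some', 'help'] (hypothetical input).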
+
+#
+# Comment - Comment to parse
+# UsageTokens - A dictionary of usage tokens and their synonyms
+# TypeTokens - A dictionary of type token synonyms
+# RemoveTokens - A list of tokens to remove from help text
+# ParseVariable - True for parsing [Guids]. Otherwise False
+#
+def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable):
+ #
+ # Initialize return values
+ #
+ Usage = None
+ Type = None
+ String = None
+
+ Comment = Comment[0]
+
+ NumTokens = 2
+ if ParseVariable:
+ #
+ # Remove white space around first instance of ':' from Comment if 'Variable'
+ # is in front of ':' and Variable is the 1st or 2nd token in Comment.
+ #
+ List = Comment.split(':', 1)
+ if len(List) > 1:
+ SubList = GenerateTokenList (List[0].strip())
+ if len(SubList) in [1, 2] and SubList[-1] == 'Variable':
+ if List[1].strip().find('L"') == 0:
+ Comment = List[0].strip() + ':' + List[1].strip()
+
+ #
+ # Remove first instance of L"<VariableName> from Comment and put into String
+ # if and only if L"<VariableName>" is the 1st token, the 2nd token. Or
+ # L"<VariableName>" is the third token immediately following 'Variable:'.
+ #
+ End = -1
+ Start = Comment.find('Variable:L"')
+ if Start >= 0:
+ String = Comment[Start + 9:]
+ End = String[2:].find('"')
+ else:
+ Start = Comment.find('L"')
+ if Start >= 0:
+ String = Comment[Start:]
+ End = String[2:].find('"')
+ if End >= 0:
+ SubList = GenerateTokenList (Comment[:Start])
+ if len(SubList) < 2:
+ Comment = Comment[:Start] + String[End + 3:]
+ String = String[:End + 3]
+ Type = 'Variable'
+ NumTokens = 1
+
+ #
+ # Initialize HelpText to Comment.
+ # Content will be removed from HelpText as matching tokens are found
+ #
+ HelpText = Comment
+
+ #
+ # Tokenize Comment using '#' and ' ' as token separators
+ #
+ List = GenerateTokenList (Comment)
+
+ #
+ # Search first two tokens for Usage and Type and remove any matching tokens
+ # from HelpText
+ #
+ for Token in List[0:NumTokens]:
+ if Usage is None and Token in UsageTokens:
+ Usage = UsageTokens[Token]
+ HelpText = HelpText.replace(Token, '')
+ if Usage is not None or not ParseVariable:
+ for Token in List[0:NumTokens]:
+ if Type is None and Token in TypeTokens:
+ Type = TypeTokens[Token]
+ HelpText = HelpText.replace(Token, '')
+ if Usage is not None:
+ for Token in List[0:NumTokens]:
+ if Token in RemoveTokens:
+ HelpText = HelpText.replace(Token, '')
+
+ #
+ # If no Usage token is present, set Usage to UNDEFINED
+ #
+ if Usage is None:
+ Usage = 'UNDEFINED'
+
+ #
+ # If no Type token is present, set Type to UNDEFINED
+ #
+ if Type is None:
+ Type = 'UNDEFINED'
+
+ #
+ # If Type is not 'Variable', then set String to None
+ #
+ if Type != 'Variable':
+ String = None
+
+ #
+ # Strip ' ' and '#' from the beginning of HelpText
+ # If HelpText is an empty string after all parsing is
+ # complete then set HelpText to None
+ #
+ HelpText = HelpText.lstrip('# ')
+ if HelpText == '':
+ HelpText = None
+
+ #
+ # Return parsing results
+ #
+ return Usage, Type, String, HelpText
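+
+# Illustrative sketch (hypothetical input; ALL_USAGE_TOKENS and
+# GUID_TYPE_TOKENS are the dictionaries defined in Library/DataType.py):
+#
+#   Usage, Type, String, HelpText = ParseComment(
+#       ('## CONSUMES ## GUID # example help', 10),
+#       ALL_USAGE_TOKENS, GUID_TYPE_TOKENS, [], True)
+#   # roughly yields ('CONSUMES', 'GUID', None, 'example help')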
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/DataType.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/DataType.py
new file mode 100644
index 00000000..3fd6c03f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/DataType.py
@@ -0,0 +1,949 @@
+## @file
+# This file is used to define class for data type structure
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+DataType
+'''
+
+##
+# Module List Items
+#
+MODULE_LIST = ["BASE",
+ "SEC",
+ "PEI_CORE",
+ "PEIM",
+ "DXE_CORE",
+ "DXE_DRIVER",
+ "SMM_CORE",
+ "DXE_RUNTIME_DRIVER",
+ "DXE_SAL_DRIVER",
+ "DXE_SMM_DRIVER",
+ "UEFI_DRIVER",
+ "UEFI_APPLICATION",
+ "USER_DEFINED"]
+
+VALID_DEPEX_MODULE_TYPE_LIST = ["PEIM",
+ "DXE_DRIVER",
+ "DXE_SMM_DRIVER",
+ "DXE_RUNTIME_DRIVER",
+ "DXE_SAL_DRIVER",
+ "UEFI_DRIVER",
+ ]
+##
+# Usage List Items
+#
+USAGE_LIST = ["CONSUMES",
+ "SOMETIMES_CONSUMES",
+ "PRODUCES",
+ "SOMETIMES_PRODUCES"]
+
+TAB_LANGUAGE_EN_US = 'en-US'
+TAB_LANGUAGE_ENG = 'eng'
+TAB_LANGUAGE_EN = 'en'
+TAB_LANGUAGE_EN_X = 'en-x-tianocore'
+
+USAGE_ITEM_PRODUCES = 'PRODUCES'
+USAGE_ITEM_SOMETIMES_PRODUCES = 'SOMETIMES_PRODUCES'
+USAGE_ITEM_CONSUMES = 'CONSUMES'
+USAGE_ITEM_SOMETIMES_CONSUMES = 'SOMETIMES_CONSUMES'
+USAGE_ITEM_TO_START = 'TO_START'
+USAGE_ITEM_BY_START = 'BY_START'
+USAGE_ITEM_NOTIFY = 'NOTIFY'
+USAGE_ITEM_UNDEFINED = 'UNDEFINED'
+
+USAGE_CONSUMES_LIST = [USAGE_ITEM_CONSUMES,
+ 'CONSUMED',
+ 'ALWAYS_CONSUMED',
+ 'ALWAYS_CONSUMES'
+ ]
+
+USAGE_PRODUCES_LIST = [USAGE_ITEM_PRODUCES,
+ 'PRODUCED',
+ 'ALWAYS_PRODUCED',
+ 'ALWAYS_PRODUCES'
+ ]
+
+USAGE_SOMETIMES_PRODUCES_LIST = [USAGE_ITEM_SOMETIMES_PRODUCES,
+ 'SOMETIMES_PRODUCED'
+ ]
+
+USAGE_SOMETIMES_CONSUMES_LIST = [USAGE_ITEM_SOMETIMES_CONSUMES,
+ 'SOMETIMES_CONSUMED'
+ ]
+
+ITEM_UNDEFINED = 'UNDEFINED'
+
+TAB_PCD_VALIDRANGE = '@ValidRange'
+TAB_PCD_VALIDLIST = '@ValidList'
+TAB_PCD_EXPRESSION = '@Expression'
+TAB_PCD_PROMPT = '@Prompt'
+TAB_STR_TOKENCNAME = 'STR'
+TAB_STR_TOKENPROMPT = 'PROMPT'
+TAB_STR_TOKENHELP = 'HELP'
+TAB_STR_TOKENERR = 'ERR'
+
+#
+# Dictionary of usage tokens and their synonyms
+#
+ALL_USAGE_TOKENS = {
+ "PRODUCES" : "PRODUCES",
+ "PRODUCED" : "PRODUCES",
+ "ALWAYS_PRODUCES" : "PRODUCES",
+ "ALWAYS_PRODUCED" : "PRODUCES",
+ "SOMETIMES_PRODUCES" : "SOMETIMES_PRODUCES",
+ "SOMETIMES_PRODUCED" : "SOMETIMES_PRODUCES",
+ "CONSUMES" : "CONSUMES",
+ "CONSUMED" : "CONSUMES",
+ "ALWAYS_CONSUMES" : "CONSUMES",
+ "ALWAYS_CONSUMED" : "CONSUMES",
+ "SOMETIMES_CONSUMES" : "SOMETIMES_CONSUMES",
+ "SOMETIMES_CONSUMED" : "SOMETIMES_CONSUMES",
+ "SOMETIME_CONSUMES" : "SOMETIMES_CONSUMES",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+PROTOCOL_USAGE_TOKENS = {
+ "TO_START" : "TO_START",
+ "BY_START" : "BY_START"
+ }
+
+PROTOCOL_USAGE_TOKENS.update (ALL_USAGE_TOKENS)
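+# e.g. PROTOCOL_USAGE_TOKENS['ALWAYS_CONSUMED'] maps to 'CONSUMES', while the
+# protocol-only keys 'TO_START' and 'BY_START' map to themselves.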
+
+#
+# Dictionary of GUID type tokens
+#
+GUID_TYPE_TOKENS = {
+ "Event" : "Event",
+ "File" : "File",
+ "FV" : "FV",
+ "GUID" : "GUID",
+ "Guid" : "GUID",
+ "HII" : "HII",
+ "HOB" : "HOB",
+ "Hob" : "HOB",
+ "Hob:" : "HOB",
+ "SystemTable" : "SystemTable",
+ "TokenSpaceGuid" : "TokenSpaceGuid",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+#
+# Dictionary of Protocol Notify tokens and their synonyms
+#
+PROTOCOL_NOTIFY_TOKENS = {
+ "NOTIFY" : "NOTIFY",
+ "PROTOCOL_NOTIFY" : "NOTIFY",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+#
+# Dictionary of PPI Notify tokens and their synonyms
+#
+PPI_NOTIFY_TOKENS = {
+ "NOTIFY" : "NOTIFY",
+ "PPI_NOTIFY" : "NOTIFY",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+EVENT_TOKENS = {
+ "EVENT_TYPE_PERIODIC_TIMER" : "EVENT_TYPE_PERIODIC_TIMER",
+ "EVENT_TYPE_RELATIVE_TIMER" : "EVENT_TYPE_RELATIVE_TIMER",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+BOOTMODE_TOKENS = {
+ "FULL" : "FULL",
+ "MINIMAL" : "MINIMAL",
+ "NO_CHANGE" : "NO_CHANGE",
+ "DIAGNOSTICS" : "DIAGNOSTICS",
+ "DEFAULT" : "DEFAULT",
+ "S2_RESUME" : "S2_RESUME",
+ "S3_RESUME" : "S3_RESUME",
+ "S4_RESUME" : "S4_RESUME",
+ "S5_RESUME" : "S5_RESUME",
+ "FLASH_UPDATE" : "FLASH_UPDATE",
+ "RECOVERY_FULL" : "RECOVERY_FULL",
+ "RECOVERY_MINIMAL" : "RECOVERY_MINIMAL",
+ "RECOVERY_NO_CHANGE" : "RECOVERY_NO_CHANGE",
+ "RECOVERY_DIAGNOSTICS" : "RECOVERY_DIAGNOSTICS",
+ "RECOVERY_DEFAULT" : "RECOVERY_DEFAULT",
+ "RECOVERY_S2_RESUME" : "RECOVERY_S2_RESUME",
+ "RECOVERY_S3_RESUME" : "RECOVERY_S3_RESUME",
+ "RECOVERY_S4_RESUME" : "RECOVERY_S4_RESUME",
+ "RECOVERY_S5_RESUME" : "RECOVERY_S5_RESUME",
+ "RECOVERY_FLASH_UPDATE" : "RECOVERY_FLASH_UPDATE",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+HOB_TOKENS = {
+ "PHIT" : "PHIT",
+ "MEMORY_ALLOCATION" : "MEMORY_ALLOCATION",
+ "LOAD_PEIM" : "LOAD_PEIM",
+ "RESOURCE_DESCRIPTOR" : "RESOURCE_DESCRIPTOR",
+ "FIRMWARE_VOLUME" : "FIRMWARE_VOLUME",
+ "UNDEFINED" : "UNDEFINED"
+ }
+
+##
+# Usage List Items for Protocol
+#
+PROTOCOL_USAGE_LIST = USAGE_LIST + ["TO_START", "BY_START"]
+
+##
+# End of Line
+# Use this rather than os.linesep, which has a bug in it.
+#
+END_OF_LINE = '\n'
+
+##
+# Arch List Items
+#
+ARCH_LIST = ["IA32",
+ "X64",
+ "IPF",
+ "EBC",
+ "COMMON"]
+##
+# PCD driver type list items
+#
+PCD_DRIVER_TYPE_LIST = ["PEI_PCD_DRIVER", "DXE_PCD_DRIVER"]
+
+##
+# Boot Mode List Items
+#
+BOOT_MODE_LIST = ["FULL",
+ "MINIMAL",
+ "NO_CHANGE",
+ "DIAGNOSTICS",
+ "DEFAULT",
+ "S2_RESUME",
+ "S3_RESUME",
+ "S4_RESUME",
+ "S5_RESUME",
+ "FLASH_UPDATE",
+ "RECOVERY_FULL",
+ "RECOVERY_MINIMAL",
+ "RECOVERY_NO_CHANGE",
+ "RECOVERY_DIAGNOSTICS",
+ "RECOVERY_DEFAULT",
+ "RECOVERY_S2_RESUME",
+ "RECOVERY_S3_RESUME",
+ "RECOVERY_S4_RESUME",
+ "RECOVERY_S5_RESUME",
+ "RECOVERY_FLASH_UPDATE"]
+
+##
+# Event Type List Items
+#
+EVENT_TYPE_LIST = ["EVENT_TYPE_PERIODIC_TIMER",
+ "EVENT_TYPE_RELATIVE_TIMER"]
+
+##
+# Hob Type List Items
+#
+HOB_TYPE_LIST = ["PHIT",
+ "MEMORY_ALLOCATION",
+ "RESOURCE_DESCRIPTOR",
+ "FIRMWARE_VOLUME",
+ "LOAD_PEIM"]
+
+##
+# GUID_TYPE_LIST
+#
+GUID_TYPE_LIST = ["Event", "File", "FV", "GUID", "HII", "HOB",
+ "SystemTable", "TokenSpaceGuid", "Variable"]
+##
+# PCD Usage Type List of Package
+#
+PCD_USAGE_TYPE_LIST_OF_PACKAGE = ["FeatureFlag", "PatchableInModule",
+ "FixedAtBuild", "Dynamic", "DynamicEx"]
+
+##
+# PCD Usage Type List of Module
+#
+PCD_USAGE_TYPE_LIST_OF_MODULE = ["FEATUREPCD", "PATCHPCD", "FIXEDPCD", "PCD", "PCDEX"]
+##
+# PCD Usage Type List of UPT
+#
+PCD_USAGE_TYPE_LIST_OF_UPT = PCD_USAGE_TYPE_LIST_OF_MODULE
+
+##
+# Binary File Type List
+#
+BINARY_FILE_TYPE_LIST = ["PE32", "PIC", "TE", "DXE_DEPEX", "VER", "UI", "COMPAT16", "FV", "BIN", "RAW",
+ "ACPI", "ASL",
+ "PEI_DEPEX",
+ "SMM_DEPEX",
+ "SUBTYPE_GUID",
+ "DISPOSABLE"
+ ]
+BINARY_FILE_TYPE_LIST_IN_UDP = \
+ ["GUID", "FREEFORM",
+ "UEFI_IMAGE", "PE32", "PIC",
+ "PEI_DEPEX",
+ "DXE_DEPEX",
+ "SMM_DEPEX",
+ "FV", "TE",
+ "BIN", "VER", "UI"
+ ]
+
+SUBTYPE_GUID_BINARY_FILE_TYPE = "FREEFORM"
+##
+# Possible values for COMPONENT_TYPE, and their descriptions, are listed in
+# the table "Component (module) Types." For each component, the BASE_NAME and
+# COMPONENT_TYPE are required. The COMPONENT_TYPE definition is case sensitive.
+#
+COMPONENT_TYPE_LIST = [
+ "APPLICATION",
+ "ACPITABLE",
+ "APRIORI",
+ "BINARY",
+ "BS_DRIVER",
+ "CONFIG",
+ "FILE",
+ "FVIMAGEFILE",
+ "LIBRARY",
+ "LOGO",
+ "LEGACY16",
+ "MICROCODE",
+ "PE32_PEIM",
+ "PEI_CORE",
+ "RAWFILE",
+ "RT_DRIVER",
+ "SAL_RT_DRIVER",
+ "SECURITY_CORE",
+ "COMBINED_PEIM_DRIVER",
+ "PIC_PEIM",
+ "RELOCATABLE_PEIM"
+ ]
+
+##
+# Common Definitions
+#
+TAB_SPLIT = '.'
+TAB_COMMENT_EDK1_START = '/*'
+TAB_COMMENT_EDK1_END = '*/'
+TAB_COMMENT_EDK1_SPLIT = '//'
+TAB_COMMENT_SPLIT = '#'
+TAB_EQUAL_SPLIT = '='
+TAB_DEQUAL_SPLIT = '=='
+TAB_VALUE_SPLIT = '|'
+TAB_COMMA_SPLIT = ','
+TAB_HORIZON_LINE_SPLIT = '-'
+TAB_SPACE_SPLIT = ' '
+TAB_UNDERLINE_SPLIT = '_'
+TAB_SEMI_COLON_SPLIT = ';'
+TAB_COLON_SPLIT = ':'
+TAB_SECTION_START = '['
+TAB_SECTION_END = ']'
+TAB_OPTION_START = '<'
+TAB_OPTION_END = '>'
+TAB_SLASH = '\\'
+TAB_BACK_SLASH = '/'
+TAB_SPECIAL_COMMENT = '##'
+TAB_HEADER_COMMENT = '@file'
+TAB_BINARY_HEADER_COMMENT = '@BinaryHeader'
+TAB_STAR = '*'
+TAB_ENCODING_UTF16LE = 'utf_16_le'
+TAB_CAPHEX_START = '0X'
+TAB_HEX_START = '0x'
+TAB_PCD_ERROR = 'Error'
+TAB_PCD_ERROR_SECTION_COMMENT = 'Error message section'
+TAB_UNI_FILE_SUFFIXS = ['.uni', '.UNI', '.Uni']
+
+TAB_EDK_SOURCE = '$(EDK_SOURCE)'
+TAB_EFI_SOURCE = '$(EFI_SOURCE)'
+TAB_WORKSPACE = '$(WORKSPACE)'
+
+TAB_ARCH_NULL = ''
+TAB_ARCH_COMMON = 'COMMON'
+TAB_ARCH_IA32 = 'IA32'
+TAB_ARCH_X64 = 'X64'
+TAB_ARCH_IPF = 'IPF'
+TAB_ARCH_ARM = 'ARM'
+TAB_ARCH_EBC = 'EBC'
+
+ARCH_LIST = \
+[TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_IPF, TAB_ARCH_ARM, TAB_ARCH_EBC]
+
+SUP_MODULE_BASE = 'BASE'
+SUP_MODULE_SEC = 'SEC'
+SUP_MODULE_PEI_CORE = 'PEI_CORE'
+SUP_MODULE_PEIM = 'PEIM'
+SUP_MODULE_DXE_CORE = 'DXE_CORE'
+SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
+SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
+SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
+SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
+SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
+SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
+SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
+SUP_MODULE_SMM_CORE = 'SMM_CORE'
+
+SUP_MODULE_LIST = \
+[SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, \
+SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
+ SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, \
+ SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
+ SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, \
+ SUP_MODULE_SMM_CORE]
+SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(SUP_MODULE_LIST)
+
+EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
+EDK_COMPONENT_TYPE_SECUARITY_CORE = 'SECUARITY_CORE'
+EDK_COMPONENT_TYPE_PEI_CORE = 'PEI_CORE'
+EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
+EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
+EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
+EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
+EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
+EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
+EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'
+EDK_NAME = 'EDK'
+EDKII_NAME = 'EDKII'
+
+BINARY_FILE_TYPE_FW = 'FW'
+BINARY_FILE_TYPE_GUID = 'GUID'
+BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
+BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
+BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
+BINARY_FILE_TYPE_SEC_UI = 'SEC_UI'
+BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
+BINARY_FILE_TYPE_SEC_VER = 'SEC_VER'
+BINARY_FILE_TYPE_LIB = 'LIB'
+BINARY_FILE_TYPE_PE32 = 'PE32'
+BINARY_FILE_TYPE_PIC = 'PIC'
+BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
+BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
+BINARY_FILE_TYPE_SMM_DEPEX = 'SMM_DEPEX'
+BINARY_FILE_TYPE_TE = 'TE'
+BINARY_FILE_TYPE_VER = 'VER'
+BINARY_FILE_TYPE_UI = 'UI'
+BINARY_FILE_TYPE_BIN = 'BIN'
+BINARY_FILE_TYPE_FV = 'FV'
+BINARY_FILE_TYPE_UI_LIST = [BINARY_FILE_TYPE_UNI_UI,
+ BINARY_FILE_TYPE_SEC_UI,
+ BINARY_FILE_TYPE_UI
+ ]
+BINARY_FILE_TYPE_VER_LIST = [BINARY_FILE_TYPE_UNI_VER,
+ BINARY_FILE_TYPE_SEC_VER,
+ BINARY_FILE_TYPE_VER
+ ]
+
+DEPEX_SECTION_LIST = ['<PEI_DEPEX>',
+ '<DXE_DEPEX>',
+ '<SMM_DEPEX>'
+ ]
+
+PLATFORM_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
+PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
+PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'
+
+TAB_LIBRARIES = 'Libraries'
+
+TAB_SOURCE = 'Source'
+TAB_SOURCES = 'Sources'
+TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
+TAB_SOURCES_IPF = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_BINARIES = 'Binaries'
+TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
+TAB_BINARIES_IPF = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_INCLUDES = 'Includes'
+TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
+TAB_INCLUDES_IPF = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_GUIDS = 'Guids'
+TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
+TAB_GUIDS_IPF = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PROTOCOLS = 'Protocols'
+TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
+TAB_PROTOCOLS_IPF = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PPIS = 'Ppis'
+TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
+TAB_PPIS_IPF = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_LIBRARY_CLASSES = 'LibraryClasses'
+TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
+TAB_LIBRARY_CLASSES_IPF = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PACKAGES = 'Packages'
+TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
+TAB_PACKAGES_IPF = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS = 'Pcds'
+TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
+TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
+TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
+TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
+TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
+TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
+TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
+TAB_PCDS_DYNAMIC = 'Dynamic'
+TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
+TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
+TAB_PCDS_DYNAMIC_HII = 'DynamicHii'
+
+TAB_PTR_TYPE_PCD = 'VOID*'
+
+PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, \
+ TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII]
+PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, \
+ TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
+
+## Dynamic-ex PCD types
+#
+gDYNAMIC_EX_PCD = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, \
+ TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
+
+TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
+TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
+TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
+TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
+TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_FIXED_AT_BUILD_IPF = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
+TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
+TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
+TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
+TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE \
++ TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
+TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
+TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_PATCHABLE_IN_MODULE_IPF = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
+TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
+TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
+TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
+TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT \
++ TAB_ARCH_COMMON
+TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
+TAB_ARCH_IA32
+TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
+TAB_ARCH_X64
+TAB_PCDS_FEATURE_FLAG_IPF = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
+TAB_ARCH_IPF
+TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
+TAB_ARCH_ARM
+TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
+TAB_ARCH_EBC
+
+TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
+TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
+TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
+TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
+TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
+TAB_ARCH_COMMON
+TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
+TAB_ARCH_IA32
+TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
+TAB_ARCH_X64
+TAB_PCDS_DYNAMIC_EX_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
+TAB_ARCH_IPF
+TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
+TAB_ARCH_ARM
+TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
+TAB_ARCH_EBC
+
+TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
+TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
+TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
+TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
+TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + \
+TAB_ARCH_COMMON
+TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_DYNAMIC_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC_DEFAULT_NULL, \
+ TAB_PCDS_DYNAMIC_VPD_NULL, \
+ TAB_PCDS_DYNAMIC_HII_NULL]
+TAB_PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, \
+ TAB_PCDS_DYNAMIC_EX_VPD_NULL, \
+ TAB_PCDS_DYNAMIC_EX_HII_NULL]
+
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE = \
+'PcdLoadFixAddressPeiCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE = \
+'PcdLoadFixAddressBootTimeCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE = \
+'PcdLoadFixAddressRuntimeCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE = \
+'PcdLoadFixAddressSmmCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_LIST = \
+[TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE, \
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE, \
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE, \
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE]
+PCD_SECTION_LIST = [TAB_PCDS_FIXED_AT_BUILD_NULL.upper(), \
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper(), \
+ TAB_PCDS_FEATURE_FLAG_NULL.upper(), \
+ TAB_PCDS_DYNAMIC_EX_NULL.upper(), \
+ TAB_PCDS_DYNAMIC_NULL.upper()]
+INF_PCD_SECTION_LIST = ["FixedPcd".upper(), "FeaturePcd".upper(), \
+ "PatchPcd".upper(), "Pcd".upper(), "PcdEx".upper()]
+
+TAB_DEPEX = 'Depex'
+TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
+TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
+TAB_DEPEX_IPF = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IPF
+TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
+TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_SKUIDS = 'SkuIds'
+
+TAB_LIBRARIES = 'Libraries'
+TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
+TAB_LIBRARIES_IPF = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_COMPONENTS = 'Components'
+TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
+TAB_COMPONENTS_IPF = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_BUILD_OPTIONS = 'BuildOptions'
+
+TAB_DEFINE = 'DEFINE'
+TAB_NMAKE = 'Nmake'
+TAB_USER_EXTENSIONS = 'UserExtensions'
+TAB_INCLUDE = '!include'
+TAB_PRIVATE = 'Private'
+TAB_INTEL = 'Intel'
+
+#
+# Common Define
+#
+TAB_COMMON_DEFINES = 'Defines'
+
+#
+# Inf Definitions
+#
+TAB_INF_DEFINES = TAB_COMMON_DEFINES
+TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
+TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
+TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
+TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
+TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
+TAB_INF_DEFINES_MODULE_UNI_FILE = 'MODULE_UNI_FILE'
+TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
+TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
+TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
+TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
+TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
+TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
+TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
+TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
+TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
+TAB_INF_DEFINES_PACKAGE = 'PACKAGE'
+TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
+TAB_INF_DEFINES_VERSION = 'VERSION'
+TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
+TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
+TAB_INF_DEFINES_TIANO_EDK1_FLASHMAP_H = 'TIANO_EDK1_FLASHMAP_H'
+TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
+TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
+TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
+TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
+TAB_INF_DEFINES_PCI_VENDOR_ID = 'PCI_VENDOR_ID'
+TAB_INF_DEFINES_PCI_DEVICE_ID = 'PCI_DEVICE_ID'
+TAB_INF_DEFINES_PCI_CLASS_CODE = 'PCI_CLASS_CODE'
+TAB_INF_DEFINES_PCI_REVISION = 'PCI_REVISION'
+TAB_INF_DEFINES_PCI_COMPRESS = 'PCI_COMPRESS'
+TAB_INF_DEFINES_DEFINE = 'DEFINE'
+TAB_INF_DEFINES_SPEC = 'SPEC'
+TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION = 'UEFI_HII_RESOURCE_SECTION'
+TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
+TAB_INF_DEFINES_MACRO = '__MACROS__'
+TAB_INF_DEFINES_SHADOW = 'SHADOW'
+TAB_INF_DEFINES_DPX_SOURCE = 'DPX_SOURCE'
+TAB_INF_FIXED_PCD = 'FixedPcd'
+TAB_INF_FEATURE_PCD = 'FeaturePcd'
+TAB_INF_PATCH_PCD = 'PatchPcd'
+TAB_INF_PCD = 'Pcd'
+TAB_INF_PCD_EX = 'PcdEx'
+TAB_INF_GUIDTYPE_VAR = 'Variable'
+TAB_INF_ABSTRACT = 'STR_MODULE_ABSTRACT'
+TAB_INF_DESCRIPTION = 'STR_MODULE_DESCRIPTION'
+TAB_INF_LICENSE = 'STR_MODULE_LICENSE'
+TAB_INF_BINARY_ABSTRACT = 'STR_MODULE_BINARY_ABSTRACT'
+TAB_INF_BINARY_DESCRIPTION = 'STR_MODULE_BINARY_DESCRIPTION'
+TAB_INF_BINARY_LICENSE = 'STR_MODULE_BINARY_LICENSE'
+#
+# Dec Definitions
+#
+TAB_DEC_DEFINES = TAB_COMMON_DEFINES
+TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
+TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
+TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
+TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
+TAB_DEC_DEFINES_PKG_UNI_FILE = 'PACKAGE_UNI_FILE'
+TAB_DEC_PACKAGE_ABSTRACT = 'STR_PACKAGE_ABSTRACT'
+TAB_DEC_PACKAGE_DESCRIPTION = 'STR_PACKAGE_DESCRIPTION'
+TAB_DEC_PACKAGE_LICENSE = 'STR_PACKAGE_LICENSE'
+TAB_DEC_BINARY_ABSTRACT = 'STR_PACKAGE_BINARY_ABSTRACT'
+TAB_DEC_BINARY_DESCRIPTION = 'STR_PACKAGE_BINARY_DESCRIPTION'
+TAB_DEC_BINARY_LICENSE = 'STR_PACKAGE_ASBUILT_LICENSE'
+#
+# Dsc Definitions
+#
+TAB_DSC_DEFINES = TAB_COMMON_DEFINES
+TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
+TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
+TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
+TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
+TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
+TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
+TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
+TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
+TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
+TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
+TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
+TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
+TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
+TAB_DSC_DEFINES_DEFINE = 'DEFINE'
+TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
+
+#
+# TargetTxt Definitions
+#
+TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
+TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
+TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
+TAB_TAT_DEFINES_MULTIPLE_THREAD = 'MULTIPLE_THREAD'
+TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
+TAB_TAT_DEFINES_TARGET = 'TARGET'
+TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
+TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
+TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"
+
+#
+# ToolDef Definitions
+#
+TAB_TOD_DEFINES_TARGET = 'TARGET'
+TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
+TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
+TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
+TAB_TOD_DEFINES_FAMILY = 'FAMILY'
+TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'
+
+#
+# Conditional Statements
+#
+TAB_IF = '!if'
+TAB_END_IF = '!endif'
+TAB_ELSE_IF = '!elseif'
+TAB_ELSE = '!else'
+TAB_IF_DEF = '!ifdef'
+TAB_IF_N_DEF = '!ifndef'
+TAB_IF_EXIST = '!if exist'
+
+#
+# Unknown section
+#
+TAB_UNKNOWN = 'UNKNOWN'
+
+#
+# Header section (virtual section for abstract, description, copyright,
+# license)
+#
+TAB_HEADER = 'Header'
+TAB_HEADER_ABSTRACT = 'Abstract'
+TAB_HEADER_DESCRIPTION = 'Description'
+TAB_HEADER_COPYRIGHT = 'Copyright'
+TAB_HEADER_LICENSE = 'License'
+TAB_BINARY_HEADER_IDENTIFIER = 'BinaryHeader'
+TAB_BINARY_HEADER_USERID = 'TianoCore'
+
+#
+# Build database path
+#
+DATABASE_PATH = ":memory:"
+#
+# used by ECC
+#
+MODIFIER_LIST = ['IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', \
+ 'EFI_BOOTSERVICE', 'EFIAPI']
+#
+# Dependency Expression
+#
+DEPEX_SUPPORTED_OPCODE = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", \
+ "END", "SOR", "TRUE", "FALSE", '(', ')']
+
+TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
+TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
+TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
+TAB_C_CODE_FILE = "C-CODE-FILE"
+TAB_C_HEADER_FILE = "C-HEADER-FILE"
+TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
+TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
+TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
+TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
+#
+# used to indicate the state of processing header comment section of dec,
+# inf files
+#
+HEADER_COMMENT_NOT_STARTED = -1
+HEADER_COMMENT_STARTED = 0
+HEADER_COMMENT_FILE = 1
+HEADER_COMMENT_ABSTRACT = 2
+HEADER_COMMENT_DESCRIPTION = 3
+HEADER_COMMENT_COPYRIGHT = 4
+HEADER_COMMENT_LICENSE = 5
+HEADER_COMMENT_END = 6
+
+#
+# Static values for data models
+#
+MODEL_UNKNOWN = 0
+
+MODEL_FILE_C = 1001
+MODEL_FILE_H = 1002
+MODEL_FILE_ASM = 1003
+MODEL_FILE_INF = 1011
+MODEL_FILE_DEC = 1012
+MODEL_FILE_DSC = 1013
+MODEL_FILE_FDF = 1014
+MODEL_FILE_INC = 1015
+MODEL_FILE_CIF = 1016
+
+MODEL_IDENTIFIER_FILE_HEADER = 2001
+MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
+MODEL_IDENTIFIER_COMMENT = 2003
+MODEL_IDENTIFIER_PARAMETER = 2004
+MODEL_IDENTIFIER_STRUCTURE = 2005
+MODEL_IDENTIFIER_VARIABLE = 2006
+MODEL_IDENTIFIER_INCLUDE = 2007
+MODEL_IDENTIFIER_PREDICATE_EXPRESSION = 2008
+MODEL_IDENTIFIER_ENUMERATE = 2009
+MODEL_IDENTIFIER_PCD = 2010
+MODEL_IDENTIFIER_UNION = 2011
+MODEL_IDENTIFIER_MACRO_IFDEF = 2012
+MODEL_IDENTIFIER_MACRO_IFNDEF = 2013
+MODEL_IDENTIFIER_MACRO_DEFINE = 2014
+MODEL_IDENTIFIER_MACRO_ENDIF = 2015
+MODEL_IDENTIFIER_MACRO_PROGMA = 2016
+MODEL_IDENTIFIER_FUNCTION_CALLING = 2018
+MODEL_IDENTIFIER_TYPEDEF = 2017
+MODEL_IDENTIFIER_FUNCTION_DECLARATION = 2019
+MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION = 2020
+
+MODEL_EFI_PROTOCOL = 3001
+MODEL_EFI_PPI = 3002
+MODEL_EFI_GUID = 3003
+MODEL_EFI_LIBRARY_CLASS = 3004
+MODEL_EFI_LIBRARY_INSTANCE = 3005
+MODEL_EFI_PCD = 3006
+MODEL_EFI_SOURCE_FILE = 3007
+MODEL_EFI_BINARY_FILE = 3008
+MODEL_EFI_SKU_ID = 3009
+MODEL_EFI_INCLUDE = 3010
+MODEL_EFI_DEPEX = 3011
+
+MODEL_PCD = 4000
+MODEL_PCD_FIXED_AT_BUILD = 4001
+MODEL_PCD_PATCHABLE_IN_MODULE = 4002
+MODEL_PCD_FEATURE_FLAG = 4003
+MODEL_PCD_DYNAMIC_EX = 4004
+MODEL_PCD_DYNAMIC_EX_DEFAULT = 4005
+MODEL_PCD_DYNAMIC_EX_VPD = 4006
+MODEL_PCD_DYNAMIC_EX_HII = 4007
+MODEL_PCD_DYNAMIC = 4008
+MODEL_PCD_DYNAMIC_DEFAULT = 4009
+MODEL_PCD_DYNAMIC_VPD = 4010
+MODEL_PCD_DYNAMIC_HII = 4011
+
+MODEL_META_DATA_FILE_HEADER = 5000
+MODEL_META_DATA_HEADER = 5001
+MODEL_META_DATA_INCLUDE = 5002
+MODEL_META_DATA_DEFINE = 5003
+MODEL_META_DATA_CONDITIONAL_STATEMENT_IF = 5004
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE = 5005
+MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF = 5006
+MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF = 5007
+MODEL_META_DATA_BUILD_OPTION = 5008
+MODEL_META_DATA_COMPONENT = 5009
+MODEL_META_DATA_USER_EXTENSION = 5010
+MODEL_META_DATA_PACKAGE = 5011
+MODEL_META_DATA_NMAKE = 5012
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
+MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
+
+TOOL_FAMILY_LIST = ["MSFT",
+ "INTEL",
+ "GCC",
+ "RVCT"
+ ]
+
+TYPE_HOB_SECTION = 'HOB'
+TYPE_EVENT_SECTION = 'EVENT'
+TYPE_BOOTMODE_SECTION = 'BOOTMODE'
+
+PCD_ERR_CODE_MAX_SIZE = 4294967295
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py
new file mode 100755
index 00000000..ae5f835b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py
@@ -0,0 +1,567 @@
+## @file
+# This file is used to check PCD logical expression
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+ExpressionValidate
+'''
+from __future__ import print_function
+
+##
+# Import Modules
+#
+import re
+from Logger import StringTable as ST
+
+## IsValidBareCString
+#
+# Check if String is composed of whitespace (0x20), tab (0x09), ! (0x21),
+# 0x23 - 0x7E, or the escape sequences '\n', '\t', '\f', '\r', '\b', '\0', '\\'
+#
+# @param String: string to be checked
+#
+def IsValidBareCString(String):
+ EscapeList = ['n', 't', 'f', 'r', 'b', '0', '\\', '"']
+ PreChar = ''
+ LastChar = ''
+ for Char in String:
+ LastChar = Char
+ if PreChar == '\\':
+ if Char not in EscapeList:
+ return False
+ if Char == '\\':
+ PreChar = ''
+ continue
+ else:
+ IntChar = ord(Char)
+ if IntChar != 0x20 and IntChar != 0x09 and IntChar != 0x21 \
+ and (IntChar < 0x23 or IntChar > 0x7e):
+ return False
+ PreChar = Char
+
+ # the string must not end with an unescaped '\'
+ if LastChar == '\\' and PreChar == LastChar:
+ return False
+ return True
+
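+# Illustrative sketch (hypothetical inputs): IsValidBareCString(r'a \n b') is
+# True because the escaped 'n' is allowed, while IsValidBareCString(r'a \x b')
+# is False because 'x' is not in the escape list.
+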
+def _ValidateToken(Token):
+ Token = Token.strip()
+ Index = Token.find("\"")
+ if Index != -1:
+ return IsValidBareCString(Token[Index+1:-1])
+ return True
+
+## _ExprError
+#
+# Exception raised when an expression fails validation
+#
+class _ExprError(Exception):
+ def __init__(self, Error = ''):
+ Exception.__init__(self)
+ self.Error = Error
+
+## _ExprBase
+#
+class _ExprBase:
+ HEX_PATTERN = r'[\t\s]*0[xX][a-fA-F0-9]+'
+ INT_PATTERN = r'[\t\s]*[0-9]+'
+ MACRO_PATTERN = r'[\t\s]*\$\(([A-Z][_A-Z0-9]*)\)'
+ PCD_PATTERN = \
+ r'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*'
+ QUOTED_PATTERN = r'[\t\s]*L?"[^"]*"'
+ BOOL_PATTERN = r'[\t\s]*(true|True|TRUE|false|False|FALSE)'
+ def __init__(self, Token):
+ self.Token = Token
+ self.Index = 0
+ self.Len = len(Token)
+
+ ## SkipWhitespace
+ #
+ def SkipWhitespace(self):
+ for Char in self.Token[self.Index:]:
+ if Char not in ' \t':
+ break
+ self.Index += 1
+
+ ## IsCurrentOp
+ #
+ # @param OpList: option list
+ #
+ def IsCurrentOp(self, OpList):
+ self.SkipWhitespace()
+ LetterOp = ["EQ", "NE", "GE", "LE", "GT", "LT", "NOT", "and", "AND",
+ "or", "OR", "XOR"]
+ OpMap = {
+ '|' : '|',
+ '&' : '&',
+ '!' : '=',
+ '>' : '=',
+ '<' : '='
+ }
+
+ for Operator in OpList:
+ if not self.Token[self.Index:].startswith(Operator):
+ continue
+
+ self.Index += len(Operator)
+ Char = self.Token[self.Index : self.Index + 1]
+
+ if (Operator in LetterOp and (Char == '_' or Char.isalnum())) \
+ or (Operator in OpMap and OpMap[Operator] == Char):
+ self.Index -= len(Operator)
+ break
+
+ return True
+
+ return False
+
+## _LogicalExpressionParser
+#
+# Parser used to validate a PCD logical expression
+#
+class _LogicalExpressionParser(_ExprBase):
+ #
+ # STRINGITEM can only be logical field according to spec
+ #
+ STRINGITEM = -1
+
+ #
+ # Evaluate to True or False
+ #
+ LOGICAL = 0
+ REALLOGICAL = 2
+
+ #
+ # Just arithmetic expression
+ #
+ ARITH = 1
+
+ def __init__(self, Token):
+ _ExprBase.__init__(self, Token)
+ self.Parens = 0
+
+ def _CheckToken(self, MatchList):
+ for Match in MatchList:
+ if Match and Match.start() == 0:
+ if not _ValidateToken(
+ self.Token[self.Index:self.Index+Match.end()]
+ ):
+ return False
+
+ self.Index += Match.end()
+ if self.Token[self.Index - 1] == '"':
+ return True
+ if self.Token[self.Index:self.Index+1] == '_' or \
+ self.Token[self.Index:self.Index+1].isalnum():
+ self.Index -= Match.end()
+ return False
+
+ Token = self.Token[self.Index - Match.end():self.Index]
+ if Token.strip() in ["EQ", "NE", "GE", "LE", "GT", "LT",
+ "NOT", "and", "AND", "or", "OR", "XOR"]:
+ self.Index -= Match.end()
+ return False
+
+ return True
+
+ return False
+
+ def IsAtomicNumVal(self):
+ #
+ # Hex number
+ #
+ Match1 = re.compile(self.HEX_PATTERN).match(self.Token[self.Index:])
+
+ #
+ # Number
+ #
+ Match2 = re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
+
+ #
+ # Macro
+ #
+ Match3 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
+
+ #
+ # PcdName
+ #
+ Match4 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
+
+ return self._CheckToken([Match1, Match2, Match3, Match4])
+
+
+ def IsAtomicItem(self):
+ #
+ # Macro
+ #
+ Match1 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
+
+ #
+ # PcdName
+ #
+ Match2 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
+
+ #
+ # Quoted string
+ #
+ Match3 = re.compile(self.QUOTED_PATTERN).\
+ match(self.Token[self.Index:].replace('\\\\', '//').\
+ replace('\\\"', '\\\''))
+
+ return self._CheckToken([Match1, Match2, Match3])
+
+ ## A || B
+ #
+ def LogicalExpression(self):
+ Ret = self.SpecNot()
+ while self.IsCurrentOp(['||', 'OR', 'or', '&&', 'AND', 'and', 'XOR', 'xor', '^']):
+ if self.Token[self.Index-1] == '|' and self.Parens <= 0:
+ raise _ExprError(ST.ERR_EXPR_OR % self.Token)
+ if Ret not in [self.ARITH, self.LOGICAL, self.REALLOGICAL, self.STRINGITEM]:
+ raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
+ Ret = self.SpecNot()
+ if Ret not in [self.ARITH, self.LOGICAL, self.REALLOGICAL, self.STRINGITEM]:
+ raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
+ Ret = self.REALLOGICAL
+ return Ret
+
+ def SpecNot(self):
+ if self.IsCurrentOp(["NOT", "!", "not"]):
+ return self.SpecNot()
+ return self.Rel()
+
+ ## A < B, A > B, A <= B, A >= B
+ #
+ def Rel(self):
+ Ret = self.Expr()
+ if self.IsCurrentOp(["<=", ">=", ">", "<", "GT", "LT", "GE", "LE",
+ "==", "EQ", "!=", "NE"]):
+ if Ret == self.STRINGITEM:
+ raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
+ Ret = self.Expr()
+ if Ret == self.REALLOGICAL:
+ raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
+ Ret = self.REALLOGICAL
+ return Ret
+
+ ## A + B, A - B
+ #
+ def Expr(self):
+ Ret = self.Factor()
+ while self.IsCurrentOp(["+", "-", "&", "|", "^", "XOR", "xor"]):
+ if self.Token[self.Index-1] == '|' and self.Parens <= 0:
+ raise _ExprError(ST.ERR_EXPR_OR)
+ if Ret == self.STRINGITEM or Ret == self.REALLOGICAL:
+ raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
+ Ret = self.Factor()
+ if Ret == self.STRINGITEM or Ret == self.REALLOGICAL:
+ raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
+ Ret = self.ARITH
+ return Ret
+
+ ## Factor
+ #
+ def Factor(self):
+ if self.IsCurrentOp(["("]):
+ self.Parens += 1
+ Ret = self.LogicalExpression()
+ if not self.IsCurrentOp([")"]):
+ raise _ExprError(ST.ERR_EXPR_RIGHT_PAREN % \
+ (self.Token, self.Token[self.Index:]))
+ self.Parens -= 1
+ return Ret
+
+ if self.IsAtomicItem():
+ if self.Token[self.Index - 1] == '"':
+ return self.STRINGITEM
+ return self.LOGICAL
+ elif self.IsAtomicNumVal():
+ return self.ARITH
+ else:
+ raise _ExprError(ST.ERR_EXPR_FACTOR % \
+ (self.Token[self.Index:], self.Token))
+
+ ## IsValidLogicalExpression
+ #
+ def IsValidLogicalExpression(self):
+ if self.Len == 0:
+ return False, ST.ERR_EXPRESS_EMPTY
+ try:
+ if self.LogicalExpression() not in [self.ARITH, self.LOGICAL, self.REALLOGICAL, self.STRINGITEM]:
+ return False, ST.ERR_EXPR_LOGICAL % self.Token
+ except _ExprError as XExcept:
+ return False, XExcept.Error
+ self.SkipWhitespace()
+ if self.Index != self.Len:
+ return False, (ST.ERR_EXPR_BOOLEAN % \
+ (self.Token[self.Index:], self.Token))
+ return True, ''
+
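+# Illustrative sketch (hypothetical macro name): validating a logical
+# expression such as '$(MACRO_A) GE 0x10':
+#
+#   Valid, Cause = _LogicalExpressionParser('$(MACRO_A) GE 0x10').IsValidLogicalExpression()
+#   # roughly yields (True, '')
+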
+## _ValidRangeExpressionParser
+#
+class _ValidRangeExpressionParser(_ExprBase):
+ INT_RANGE_PATTERN = r'[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
+ HEX_RANGE_PATTERN = \
+ r'[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
+ def __init__(self, Token):
+ _ExprBase.__init__(self, Token)
+ self.Parens = 0
+ self.HEX = 1
+ self.INT = 2
+ self.IsParenHappen = False
+ self.IsLogicalOpHappen = False
+
+ ## IsValidRangeExpression
+ #
+ def IsValidRangeExpression(self):
+ if self.Len == 0:
+ return False, ST.ERR_EXPR_RANGE_EMPTY
+ try:
+ if self.RangeExpression() not in [self.HEX, self.INT]:
+ return False, ST.ERR_EXPR_RANGE % self.Token
+ except _ExprError as XExcept:
+ return False, XExcept.Error
+
+ self.SkipWhitespace()
+ if self.Index != self.Len:
+ return False, (ST.ERR_EXPR_RANGE % self.Token)
+ return True, ''
+
+ ## RangeExpression
+ #
+ def RangeExpression(self):
+ Ret = self.Unary()
+ while self.IsCurrentOp(['OR', 'AND', 'and', 'or']):
+ self.IsLogicalOpHappen = True
+ if not self.IsParenHappen:
+ raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
+ self.IsParenHappen = False
+ Ret = self.Unary()
+
+ if self.IsCurrentOp(['XOR']):
+ Ret = self.Unary()
+
+ return Ret
+
+ ## Unary
+ #
+ def Unary(self):
+ if self.IsCurrentOp(["NOT"]):
+ return self.Unary()
+
+ return self.ValidRange()
+
+ ## ValidRange
+ #
+ def ValidRange(self):
+ Ret = -1
+ if self.IsCurrentOp(["("]):
+ self.IsLogicalOpHappen = False
+ self.IsParenHappen = True
+ self.Parens += 1
+ if self.Parens > 1:
+ raise _ExprError(ST.ERR_EXPR_RANGE_DOUBLE_PAREN_NESTED % self.Token)
+ Ret = self.RangeExpression()
+ if not self.IsCurrentOp([")"]):
+ raise _ExprError(ST.ERR_EXPR_RIGHT_PAREN % self.Token)
+ self.Parens -= 1
+ return Ret
+
+ if self.IsLogicalOpHappen:
+ raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
+
+ if self.IsCurrentOp(["LT", "GT", "LE", "GE", "EQ", "XOR"]):
+ IntMatch = \
+ re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
+ HexMatch = \
+ re.compile(self.HEX_PATTERN).match(self.Token[self.Index:])
+ if HexMatch and HexMatch.start() == 0:
+ self.Index += HexMatch.end()
+ Ret = self.HEX
+ elif IntMatch and IntMatch.start() == 0:
+ self.Index += IntMatch.end()
+ Ret = self.INT
+ else:
+ raise _ExprError(ST.ERR_EXPR_RANGE_FACTOR % (self.Token[self.Index:], self.Token))
+ else:
+ IntRangeMatch = re.compile(
+ self.INT_RANGE_PATTERN).match(self.Token[self.Index:]
+ )
+ HexRangeMatch = re.compile(
+ self.HEX_RANGE_PATTERN).match(self.Token[self.Index:]
+ )
+ if HexRangeMatch and HexRangeMatch.start() == 0:
+ self.Index += HexRangeMatch.end()
+ Ret = self.HEX
+ elif IntRangeMatch and IntRangeMatch.start() == 0:
+ self.Index += IntRangeMatch.end()
+ Ret = self.INT
+ else:
+ raise _ExprError(ST.ERR_EXPR_RANGE % self.Token)
+
+ return Ret
+
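+# Usage sketch (hedged; operator handling lives in _ExprBase): a pure hex
+# range like _ValidRangeExpressionParser('0x1 - 0xFF').IsValidRangeExpression()
+# is expected to return (True, ''), while mixing radices, as in '1 - 0xFF',
+# leaves unconsumed text and returns (False, <error message>).
+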
+## _ValidListExpressionParser
+#
+class _ValidListExpressionParser(_ExprBase):
+    VALID_LIST_PATTERN = r'(0[xX][0-9a-fA-F]+|[0-9]+)([\t\s]*,[\t\s]*(0[xX][0-9a-fA-F]+|[0-9]+))*'
+ def __init__(self, Token):
+ _ExprBase.__init__(self, Token)
+ self.NUM = 1
+
+ def IsValidListExpression(self):
+ if self.Len == 0:
+ return False, ST.ERR_EXPR_LIST_EMPTY
+ try:
+ if self.ListExpression() not in [self.NUM]:
+ return False, ST.ERR_EXPR_LIST % self.Token
+ except _ExprError as XExcept:
+ return False, XExcept.Error
+
+ self.SkipWhitespace()
+ if self.Index != self.Len:
+ return False, (ST.ERR_EXPR_LIST % self.Token)
+
+ return True, ''
+
+ def ListExpression(self):
+ Ret = -1
+ self.SkipWhitespace()
+ ListMatch = re.compile(self.VALID_LIST_PATTERN).match(self.Token[self.Index:])
+ if ListMatch and ListMatch.start() == 0:
+ self.Index += ListMatch.end()
+ Ret = self.NUM
+ else:
+ raise _ExprError(ST.ERR_EXPR_LIST % self.Token)
+
+ return Ret
+
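+# Usage sketch: _ValidListExpressionParser('1, 0x2, 30').IsValidListExpression()
+# is expected to return (True, ''); a trailing comma, as in '1, 2,', leaves an
+# unmatched tail and returns (False, <error message>).
+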
+## _StringTestParser
+#
+class _StringTestParser(_ExprBase):
+ def __init__(self, Token):
+ _ExprBase.__init__(self, Token)
+
+ ## IsValidStringTest
+ #
+ def IsValidStringTest(self):
+ if self.Len == 0:
+ return False, ST.ERR_EXPR_EMPTY
+ try:
+ self.StringTest()
+ except _ExprError as XExcept:
+ return False, XExcept.Error
+ return True, ''
+
+ ## StringItem
+ #
+ def StringItem(self):
+ Match1 = re.compile(self.QUOTED_PATTERN)\
+ .match(self.Token[self.Index:].replace('\\\\', '//')\
+ .replace('\\\"', '\\\''))
+ Match2 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
+ Match3 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
+ MatchList = [Match1, Match2, Match3]
+ for Match in MatchList:
+ if Match and Match.start() == 0:
+ if not _ValidateToken(
+ self.Token[self.Index:self.Index+Match.end()]
+ ):
+ raise _ExprError(ST.ERR_EXPR_STRING_ITEM % \
+ (self.Token, self.Token[self.Index:]))
+ self.Index += Match.end()
+ Token = self.Token[self.Index - Match.end():self.Index]
+ if Token.strip() in ["EQ", "NE"]:
+ raise _ExprError(ST.ERR_EXPR_STRING_ITEM % \
+ (self.Token, self.Token[self.Index:]))
+ return
+ else:
+ raise _ExprError(ST.ERR_EXPR_STRING_ITEM % \
+ (self.Token, self.Token[self.Index:]))
+
+ ## StringTest
+ #
+ def StringTest(self):
+ self.StringItem()
+ if not self.IsCurrentOp(["==", "EQ", "!=", "NE"]):
+ raise _ExprError(ST.ERR_EXPR_EQUALITY % \
+ (self.Token[self.Index:], self.Token))
+ self.StringItem()
+ if self.Index != self.Len:
+ raise _ExprError(ST.ERR_EXPR_BOOLEAN % \
+ (self.Token[self.Index:], self.Token))
+
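+# Usage sketch: _StringTestParser('"Apple" != "Orange"').IsValidStringTest()
+# is expected to return (True, ''), since both operands are quoted strings
+# joined by a single equality operator.
+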
+##
+# Check syntax of string test
+#
+# @param Token: string test token
+#
+def IsValidStringTest(Token, Flag=False):
+ #
+    # Do not perform the check right now; keep the implementation for future enhancement.
+ #
+ if not Flag:
+ return True, ""
+ return _StringTestParser(Token).IsValidStringTest()
+
+
+##
+# Check syntax of logical expression
+#
+# @param Token: expression token
+#
+def IsValidLogicalExpr(Token, Flag=False):
+ #
+    # Do not perform the check right now; keep the implementation for future enhancement.
+ #
+ if not Flag:
+ return True, ""
+ return _LogicalExpressionParser(Token).IsValidLogicalExpression()
+
+##
+# Check syntax of range expression
+#
+# @param Token: range expression token
+#
+def IsValidRangeExpr(Token):
+ return _ValidRangeExpressionParser(Token).IsValidRangeExpression()
+
+##
+# Check syntax of value list expression token
+#
+# @param Token: value list expression token
+#
+def IsValidListExpr(Token):
+ return _ValidListExpressionParser(Token).IsValidListExpression()
+
+##
+# Check whether the feature flag expression is valid or not
+#
+# @param Token: feature flag expression
+#
+def IsValidFeatureFlagExp(Token, Flag=False):
+ #
+    # Do not perform the check right now; keep the implementation for future enhancement.
+ #
+ if not Flag:
+ return True, "", Token
+ else:
+ if Token in ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
+ '0x1', '0x01', '0x0', '0x00']:
+ return True, ""
+ Valid, Cause = IsValidStringTest(Token, Flag)
+ if not Valid:
+ Valid, Cause = IsValidLogicalExpr(Token, Flag)
+ if not Valid:
+ return False, Cause
+ return True, ""
+
+if __name__ == '__main__':
+# print(IsValidRangeExpr('LT 9'))
+ print(_LogicalExpressionParser('gCrownBayTokenSpaceGuid.PcdPciDevice1BridgeAddressLE0').IsValidLogicalExpression())
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/GlobalData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/GlobalData.py
new file mode 100644
index 00000000..984294c9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/GlobalData.py
@@ -0,0 +1,110 @@
+## @file
+# This file is used to define common static strings and global data used by UPT
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+GlobalData
+'''
+
+#
+# The workspace directory
+#
+gWORKSPACE = '.'
+gPACKAGE_PATH = None
+
+#
+# INF module directory
+#
+gINF_MODULE_DIR = "."
+gINF_MODULE_NAME = ''
+
+#
+# The directory that holds UPT-related files
+#
+gUPT_DIR = r"Conf/upt/"
+
+#
+# Log file for invalid meta-data files during forced removal
+#
+gINVALID_MODULE_FILE = gUPT_DIR + r"Invalid_Modules.log"
+
+#
+# File name for content zip file in the distribution
+#
+gCONTENT_FILE = "dist.content"
+
+#
+# File name for XML file in the distribution
+#
+gDESC_FILE = 'dist.pkg'
+
+#
+# Case Insensitive flag
+#
+gCASE_INSENSITIVE = ''
+
+#
+# All Files dictionary
+#
+gALL_FILES = {}
+
+#
+# Database instance
+#
+gDB = None
+
+#
+# List of files that are found at module level but not in INF files;
+# items are (File, ModulePath), and all paths should be relative to $(WORKSPACE)
+#
+gMISS_FILE_IN_MODLIST = []
+
+#
+# Global Current Line
+#
+gINF_CURRENT_LINE = None
+
+#
+# Global pkg list
+#
+gWSPKG_LIST = []
+
+#
+# Flag used to treat WARN as ERROR.
+# By default, only ERROR messages will break the tool's execution.
+#
+gWARNING_AS_ERROR = False
+
+#
+# Used to specify the temp directory to hold the unpacked distribution files
+#
+gUNPACK_DIR = []
+
+#
+# Flag used to mark whether the INF file is Binary INF or not.
+#
+gIS_BINARY_INF = False
+
+#
+# Used by FileHook module.
+#
+gRECOVERMGR = None
+
+#
+# Used by PCD parser
+#
+gPackageDict = {}
+
+#
+# Used by Library instance parser
+# {FilePath: FileObj}
+#
+gLIBINSTANCEDICT = {}
+
+#
+# Store the list of DIST
+#
+gTO_BE_INSTALLED_DIST_LIST = []
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Misc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Misc.py
new file mode 100755
index 00000000..da62c9a7
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Misc.py
@@ -0,0 +1,989 @@
+## @file
+# Common routines used by all tools
+#
+# Copyright (c) 2011 - 2019, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Misc
+'''
+
+##
+# Import Modules
+#
+import os.path
+from os import access
+from os import F_OK
+from os import makedirs
+from os import getcwd
+from os import chdir
+from os import listdir
+from os import remove
+from os import rmdir
+from os import linesep
+from os import walk
+from os import environ
+import re
+from collections import OrderedDict as Sdict
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger import ToolError
+from Library import GlobalData
+from Library.DataType import SUP_MODULE_LIST
+from Library.DataType import END_OF_LINE
+from Library.DataType import TAB_SPLIT
+from Library.DataType import TAB_LANGUAGE_EN_US
+from Library.DataType import TAB_LANGUAGE_EN
+from Library.DataType import TAB_LANGUAGE_EN_X
+from Library.DataType import TAB_UNI_FILE_SUFFIXS
+from Library.StringUtils import GetSplitValueList
+from Library.ParserValidate import IsValidHexVersion
+from Library.ParserValidate import IsValidPath
+from Object.POM.CommonObject import TextObject
+from Core.FileHook import __FileHookOpen__
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C
+# structure style
+#
+# @param Guid: The GUID string
+#
+def GuidStringToGuidStructureString(Guid):
+ GuidList = Guid.split('-')
+ Result = '{'
+ for Index in range(0, 3, 1):
+ Result = Result + '0x' + GuidList[Index] + ', '
+ Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4]
+ for Index in range(0, 12, 2):
+ Result = Result + ', 0x' + GuidList[4][Index:Index + 2]
+ Result += '}}'
+ return Result
+
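+# Usage sketch (hypothetical GUID value):
+# GuidStringToGuidStructureString('01234567-89ab-cdef-0123-456789abcdef')
+# is expected to return
+# '{0x01234567, 0x89ab, 0xcdef, {0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef}}'
+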
+## Check whether GUID string is of format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+#
+# @param GuidValue: The GUID value
+#
+def CheckGuidRegFormat(GuidValue):
+ ## Regular expression used to find out register format of GUID
+ #
+    RegFormatGuidPattern = re.compile(r"^\s*([0-9a-fA-F]){8}-"
+                                       r"([0-9a-fA-F]){4}-"
+                                       r"([0-9a-fA-F]){4}-"
+                                       r"([0-9a-fA-F]){4}-"
+                                       r"([0-9a-fA-F]){12}\s*$")
+
+ if RegFormatGuidPattern.match(GuidValue):
+ return True
+ else:
+ return False
+
+
+## Convert GUID string in C structure style to
+# xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+#
+# @param GuidValue: The GUID value in C structure format
+#
+def GuidStructureStringToGuidString(GuidValue):
+ GuidValueString = GuidValue.lower().replace("{", "").replace("}", "").\
+ replace(" ", "").replace(";", "")
+ GuidValueList = GuidValueString.split(",")
+ if len(GuidValueList) != 11:
+ return ''
+ try:
+ return "%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
+ int(GuidValueList[0], 16),
+ int(GuidValueList[1], 16),
+ int(GuidValueList[2], 16),
+ int(GuidValueList[3], 16),
+ int(GuidValueList[4], 16),
+ int(GuidValueList[5], 16),
+ int(GuidValueList[6], 16),
+ int(GuidValueList[7], 16),
+ int(GuidValueList[8], 16),
+ int(GuidValueList[9], 16),
+ int(GuidValueList[10], 16)
+ )
+ except BaseException:
+ return ''
+
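+# Usage sketch: the C structure form round-trips back to registry format, e.g.
+# GuidStructureStringToGuidString(
+#     '{0x01234567, 0x89ab, 0xcdef, {0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef}}')
+# is expected to yield '01234567-89ab-cdef-0123-456789abcdef'; malformed input
+# yields ''.
+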
+## Create directories
+#
+# @param Directory: The directory name
+#
+def CreateDirectory(Directory):
+ if Directory is None or Directory.strip() == "":
+ return True
+ try:
+ if not access(Directory, F_OK):
+ makedirs(Directory)
+ except BaseException:
+ return False
+ return True
+
+## Remove directories, including files and sub-directories in it
+#
+# @param Directory: The directory name
+#
+def RemoveDirectory(Directory, Recursively=False):
+ if Directory is None or Directory.strip() == "" or not \
+ os.path.exists(Directory):
+ return
+ if Recursively:
+ CurrentDirectory = getcwd()
+ chdir(Directory)
+ for File in listdir("."):
+ if os.path.isdir(File):
+ RemoveDirectory(File, Recursively)
+ else:
+ remove(File)
+ chdir(CurrentDirectory)
+ rmdir(Directory)
+
+## Store content in file
+#
+# This method is used to save file only when its content is changed. This is
+# quite useful for "make" system to decide what will be re-built and what
+# won't.
+#
+# @param File: The path of file
+# @param Content: The new content of the file
+# @param IsBinaryFile: The flag indicating if the file is binary file
+# or not
+#
+def SaveFileOnChange(File, Content, IsBinaryFile=True):
+ if os.path.exists(File):
+ if IsBinaryFile:
+ try:
+ if Content == __FileHookOpen__(File, "rb").read():
+ return False
+ except BaseException:
+ Logger.Error(None, ToolError.FILE_OPEN_FAILURE, ExtraData=File)
+ else:
+ try:
+ if Content == __FileHookOpen__(File, "r").read():
+ return False
+ except BaseException:
+ Logger.Error(None, ToolError.FILE_OPEN_FAILURE, ExtraData=File)
+
+ CreateDirectory(os.path.dirname(File))
+ if IsBinaryFile:
+ try:
+ FileFd = __FileHookOpen__(File, "wb")
+ FileFd.write(Content)
+ FileFd.close()
+ except BaseException:
+ Logger.Error(None, ToolError.FILE_CREATE_FAILURE, ExtraData=File)
+ else:
+ try:
+ FileFd = __FileHookOpen__(File, "w")
+ FileFd.write(Content)
+ FileFd.close()
+ except BaseException:
+ Logger.Error(None, ToolError.FILE_CREATE_FAILURE, ExtraData=File)
+
+ return True
+
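+# Usage sketch (hypothetical path): SaveFileOnChange('Build/AutoGen.c', Content,
+# IsBinaryFile=False) returns True only when the file was actually (re)written,
+# so callers can skip downstream work when nothing changed.
+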
+## Get all files of a directory
+#
+# @param Root: Root dir
+# @param SkipList : The files that need to be skipped
+#
+def GetFiles(Root, SkipList=None, FullPath=True):
+ OriPath = os.path.normpath(Root)
+ FileList = []
+ for Root, Dirs, Files in walk(Root):
+ if SkipList:
+ for Item in SkipList:
+ if Item in Dirs:
+ Dirs.remove(Item)
+ if Item in Files:
+ Files.remove(Item)
+        #
+        # Prune hidden directories in place; removing items from a list
+        # while iterating over it would skip entries.
+        #
+        Dirs[:] = [Dir for Dir in Dirs if not Dir.startswith('.')]
+
+ for File in Files:
+ if File.startswith('.'):
+ continue
+ File = os.path.normpath(os.path.join(Root, File))
+ if not FullPath:
+ File = File[len(OriPath) + 1:]
+ FileList.append(File)
+
+ return FileList
+
+## Get all non-metadata files of a directory
+#
+# @param Root: Root Dir
+# @param SkipList : List of paths that need to be skipped
+# @param FullPath: True if the returned file should be full path
+# @param PrefixPath: the path that need to be added to the files found
+# @return: the list of files found
+#
+def GetNonMetaDataFiles(Root, SkipList, FullPath, PrefixPath):
+ FileList = GetFiles(Root, SkipList, FullPath)
+ NewFileList = []
+ for File in FileList:
+ ExtName = os.path.splitext(File)[1]
+ #
+ # skip '.dec', '.inf', '.dsc', '.fdf' files
+ #
+ if ExtName.lower() not in ['.dec', '.inf', '.dsc', '.fdf']:
+ NewFileList.append(os.path.normpath(os.path.join(PrefixPath, File)))
+
+ return NewFileList
+
+## Check if given file exists or not
+#
+# @param File: File name or path to be checked
+# @param Ext:  Expected file extension, if any
+#
+def ValidFile(File, Ext=None):
+ File = File.replace('\\', '/')
+ if Ext is not None:
+ FileExt = os.path.splitext(File)[1]
+ if FileExt.lower() != Ext.lower():
+ return False
+ if not os.path.exists(File):
+ return False
+ return True
+
+## RealPath
+#
+# @param File: File name or path to be checked
+# @param Dir: The directory the file is relative to
+# @param OverrideDir: The override directory
+#
+def RealPath(File, Dir='', OverrideDir=''):
+ NewFile = os.path.normpath(os.path.join(Dir, File))
+ NewFile = GlobalData.gALL_FILES[NewFile]
+ if not NewFile and OverrideDir:
+ NewFile = os.path.normpath(os.path.join(OverrideDir, File))
+ NewFile = GlobalData.gALL_FILES[NewFile]
+ return NewFile
+
+## RealPath2
+#
+# @param File: File name or path to be checked
+# @param Dir: The directory the file is relative to
+# @param OverrideDir: The override directory
+#
+def RealPath2(File, Dir='', OverrideDir=''):
+ if OverrideDir:
+ NewFile = GlobalData.gALL_FILES[os.path.normpath(os.path.join\
+ (OverrideDir, File))]
+ if NewFile:
+ if OverrideDir[-1] == os.path.sep:
+ return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
+ else:
+ return NewFile[len(OverrideDir) + 1:], \
+ NewFile[0:len(OverrideDir)]
+
+ NewFile = GlobalData.gALL_FILES[os.path.normpath(os.path.join(Dir, File))]
+ if NewFile:
+ if Dir:
+ if Dir[-1] == os.path.sep:
+ return NewFile[len(Dir):], NewFile[0:len(Dir)]
+ else:
+ return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
+ else:
+ return NewFile, ''
+
+ return None, None
+
+## CommonPath
+#
+# @param PathList: PathList
+#
+def CommonPath(PathList):
+ Path1 = min(PathList).split(os.path.sep)
+ Path2 = max(PathList).split(os.path.sep)
+ for Index in range(min(len(Path1), len(Path2))):
+ if Path1[Index] != Path2[Index]:
+ return os.path.sep.join(Path1[:Index])
+ return os.path.sep.join(Path1)
+
+## PathClass
+#
+class PathClass(object):
+ def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False,
+ Arch='COMMON', ToolChainFamily='', Target='', TagName='', \
+ ToolCode=''):
+ self.Arch = Arch
+ self.File = str(File)
+ if os.path.isabs(self.File):
+ self.Root = ''
+ self.AlterRoot = ''
+ else:
+ self.Root = str(Root)
+ self.AlterRoot = str(AlterRoot)
+
+ #
+ # Remove any '.' and '..' in path
+ #
+ if self.Root:
+ self.Path = os.path.normpath(os.path.join(self.Root, self.File))
+ self.Root = os.path.normpath(CommonPath([self.Root, self.Path]))
+ #
+ # eliminate the side-effect of 'C:'
+ #
+ if self.Root[-1] == ':':
+ self.Root += os.path.sep
+ #
+ # file path should not start with path separator
+ #
+ if self.Root[-1] == os.path.sep:
+ self.File = self.Path[len(self.Root):]
+ else:
+ self.File = self.Path[len(self.Root) + 1:]
+ else:
+ self.Path = os.path.normpath(self.File)
+
+ self.SubDir, self.Name = os.path.split(self.File)
+ self.BaseName, self.Ext = os.path.splitext(self.Name)
+
+ if self.Root:
+ if self.SubDir:
+ self.Dir = os.path.join(self.Root, self.SubDir)
+ else:
+ self.Dir = self.Root
+ else:
+ self.Dir = self.SubDir
+
+ if IsBinary:
+ self.Type = Type
+ else:
+ self.Type = self.Ext.lower()
+
+ self.IsBinary = IsBinary
+ self.Target = Target
+ self.TagName = TagName
+ self.ToolCode = ToolCode
+ self.ToolChainFamily = ToolChainFamily
+
+ self._Key = None
+
+ ## Convert the object of this class to a string
+ #
+ # Convert member Path of the class to a string
+ #
+ def __str__(self):
+ return self.Path
+
+ ## Override __eq__ function
+ #
+ # Check whether PathClass are the same
+ #
+ def __eq__(self, Other):
+ if isinstance(Other, type(self)):
+ return self.Path == Other.Path
+ else:
+ return self.Path == str(Other)
+
+ ## Override __hash__ function
+ #
+ # Use Path as key in hash table
+ #
+ def __hash__(self):
+ return hash(self.Path)
+
+ ## _GetFileKey
+ #
+ def _GetFileKey(self):
+ if self._Key is None:
+ self._Key = self.Path.upper()
+ return self._Key
+ ## Validate
+ #
+ def Validate(self, Type='', CaseSensitive=True):
+ if GlobalData.gCASE_INSENSITIVE:
+ CaseSensitive = False
+ if Type and Type.lower() != self.Type:
+ return ToolError.FILE_TYPE_MISMATCH, '%s (expect %s but got %s)' % \
+ (self.File, Type, self.Type)
+
+ RealFile, RealRoot = RealPath2(self.File, self.Root, self.AlterRoot)
+ if not RealRoot and not RealFile:
+ RealFile = self.File
+ if self.AlterRoot:
+ RealFile = os.path.join(self.AlterRoot, self.File)
+ elif self.Root:
+ RealFile = os.path.join(self.Root, self.File)
+ return ToolError.FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile)
+
+ ErrorCode = 0
+ ErrorInfo = ''
+ if RealRoot != self.Root or RealFile != self.File:
+ if CaseSensitive and (RealFile != self.File or \
+ (RealRoot != self.Root and RealRoot != \
+ self.AlterRoot)):
+ ErrorCode = ToolError.FILE_CASE_MISMATCH
+ ErrorInfo = self.File + '\n\t' + RealFile + \
+ " [in file system]"
+
+ self.SubDir, self.Name = os.path.split(RealFile)
+ self.BaseName, self.Ext = os.path.splitext(self.Name)
+ if self.SubDir:
+ self.Dir = os.path.join(RealRoot, self.SubDir)
+ else:
+ self.Dir = RealRoot
+ self.File = RealFile
+ self.Root = RealRoot
+ self.Path = os.path.join(RealRoot, RealFile)
+ return ErrorCode, ErrorInfo
+
+ Key = property(_GetFileKey)
+
+## Get current workspace
+#
+# Get WORKSPACE from the environment variable if present; if not, use the current working directory as WORKSPACE
+#
+def GetWorkspace():
+ #
+ # check WORKSPACE
+ #
+ if "WORKSPACE" in environ:
+ WorkspaceDir = os.path.normpath(environ["WORKSPACE"])
+ if not os.path.exists(WorkspaceDir):
+ Logger.Error("UPT",
+ ToolError.UPT_ENVIRON_MISSING_ERROR,
+ ST.ERR_WORKSPACE_NOTEXIST,
+ ExtraData="%s" % WorkspaceDir)
+ else:
+ WorkspaceDir = os.getcwd()
+
+ if WorkspaceDir[-1] == ':':
+ WorkspaceDir += os.sep
+
+ PackagesPath = os.environ.get("PACKAGES_PATH")
+ mws.setWs(WorkspaceDir, PackagesPath)
+
+ return WorkspaceDir, mws.PACKAGES_PATH
+
+## Get relative path
+#
+# Use the full path and workspace to get the relative path;
+# the purpose of this function is mainly to resolve the root path issue (like c: or c:\)
+#
+# @param Fullpath: a string of fullpath
+# @param Workspace: a string of workspace
+#
+def GetRelativePath(Fullpath, Workspace):
+
+ RelativePath = ''
+ if Workspace.endswith(os.sep):
+ RelativePath = Fullpath[Fullpath.upper().find(Workspace.upper())+len(Workspace):]
+ else:
+ RelativePath = Fullpath[Fullpath.upper().find(Workspace.upper())+len(Workspace)+1:]
+
+ return RelativePath
+
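+# Usage sketch (hypothetical Windows paths):
+# GetRelativePath('C:\\ws\\MdePkg\\MdePkg.dec', 'C:\\ws') is expected to
+# return 'MdePkg\\MdePkg.dec'; the search is case-insensitive, so 'c:\\WS'
+# also works.
+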
+## Check whether all module types are in list
+#
+# check whether all module types (SUP_MODULE_LIST) are in list
+#
+# @param ModuleList: a list of ModuleType
+#
+def IsAllModuleList(ModuleList):
+ NewModuleList = [Module.upper() for Module in ModuleList]
+ for Module in SUP_MODULE_LIST:
+ if Module not in NewModuleList:
+ return False
+ else:
+ return True
+
+## Dictionary that uses comments (GenericComment, TailComment) as values;
+# if a new comment whose key is already in the dict is inserted, the
+# comments will be merged.
+# Key is (Statement, SupArch); when a TailComment is added, it is indented
+# according to Statement
+#
+class MergeCommentDict(dict):
+ ## []= operator
+ #
+ def __setitem__(self, Key, CommentVal):
+ GenericComment, TailComment = CommentVal
+ if Key in self:
+ OrigVal1, OrigVal2 = dict.__getitem__(self, Key)
+ Statement = Key[0]
+ dict.__setitem__(self, Key, (OrigVal1 + GenericComment, OrigVal2 \
+ + len(Statement) * ' ' + TailComment))
+ else:
+ dict.__setitem__(self, Key, (GenericComment, TailComment))
+
+ ## =[] operator
+ #
+ def __getitem__(self, Key):
+ return dict.__getitem__(self, Key)
+
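+# Usage sketch: with Dic = MergeCommentDict(), assigning
+# Dic[('Stmt', 'IA32')] = ('#g1\n', '#t1') and then
+# Dic[('Stmt', 'IA32')] = ('#g2\n', '#t2') merges the values into
+# ('#g1\n#g2\n', '#t1' + ' ' * len('Stmt') + '#t2').
+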
+
+## GenDummyHelpTextObj
+#
+# @retval HelpTxt: Generated dummy help text object
+#
+def GenDummyHelpTextObj():
+ HelpTxt = TextObject()
+ HelpTxt.SetLang(TAB_LANGUAGE_EN_US)
+ HelpTxt.SetString(' ')
+ return HelpTxt
+
+## ConvertVersionToDecimal, the minor version should be within 0 - 99
+# <HexVersion> ::= "0x" <Major> <Minor>
+# <Major> ::= (a-fA-F0-9){4}
+# <Minor> ::= (a-fA-F0-9){4}
+# <DecVersion> ::= (0-65535) ["." (0-99)]
+#
+# @param StringIn: The string containing the version defined in the INF file.
+# It can be decimal or hex.
+#
+def ConvertVersionToDecimal(StringIn):
+ if IsValidHexVersion(StringIn):
+ Value = int(StringIn, 16)
+ Major = Value >> 16
+ Minor = Value & 0xFFFF
+ MinorStr = str(Minor)
+ if len(MinorStr) == 1:
+ MinorStr = '0' + MinorStr
+ return str(Major) + '.' + MinorStr
+ else:
+ if StringIn.find(TAB_SPLIT) != -1:
+ return StringIn
+ elif StringIn:
+ return StringIn + '.0'
+ else:
+ #
+ # when StringIn is '', return it directly
+ #
+ return StringIn
+
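+# Usage sketch: ConvertVersionToDecimal('0x00010005') -> '1.05',
+# ConvertVersionToDecimal('1.2') -> '1.2', ConvertVersionToDecimal('3') -> '3.0'.
+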
+## GetHelpStringByRemoveHashKey
+#
+# Remove the hash keys at the head of the string and return the remainder.
+#
+# @param String: The string to be processed.
+#
+def GetHelpStringByRemoveHashKey(String):
+ ReturnString = ''
+    PatternRemoveHashKey = re.compile(r"^[#+\s]+", re.DOTALL)
+ String = String.strip()
+ if String == '':
+ return String
+
+ LineList = GetSplitValueList(String, END_OF_LINE)
+ for Line in LineList:
+        ValueList = PatternRemoveHashKey.split(Line)
+ if len(ValueList) == 1:
+ ReturnString += ValueList[0] + END_OF_LINE
+ else:
+ ReturnString += ValueList[1] + END_OF_LINE
+
+ if ReturnString.endswith('\n') and not ReturnString.endswith('\n\n') and ReturnString != '\n':
+ ReturnString = ReturnString[:-1]
+
+ return ReturnString
+
+## ConvPathFromAbsToRel
+#
+# Get relative file path from absolute path.
+#
+# @param Path: The string containing the file's absolute path.
+# @param Root: The string containing the parent path of Path.
+#
+#
+def ConvPathFromAbsToRel(Path, Root):
+ Path = os.path.normpath(Path)
+ Root = os.path.normpath(Root)
+ FullPath = os.path.normpath(os.path.join(Root, Path))
+
+ #
+ # If Path is absolute path.
+ # It should be in Root.
+ #
+ if os.path.isabs(Path):
+ return FullPath[FullPath.find(Root) + len(Root) + 1:]
+
+ else:
+ return Path
+
+## ConvertPath
+#
+# Convert special characters to '_' and '\' to '/';
+# return the converted path, e.g. Test!1.inf -> Test_1.inf
+#
+# @param Path: Path to be converted
+#
+def ConvertPath(Path):
+ RetPath = ''
+ for Char in Path.strip():
+ if Char.isalnum() or Char in '.-_/':
+ RetPath = RetPath + Char
+ elif Char == '\\':
+ RetPath = RetPath + '/'
+ else:
+ RetPath = RetPath + '_'
+ return RetPath
+
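+# Usage sketch: ConvertPath('Test!1.inf') -> 'Test_1.inf';
+# ConvertPath('a\\b c.inf') -> 'a/b_c.inf' (backslash to '/', space to '_').
+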
+## ConvertSpec
+#
+# During install, convert the Spec string extracted from the UPD into an INF-allowable definition;
+# the difference is that a period is allowed in the former (though not as the first letter) but not in the latter.
+# return converted Spec string
+#
+# @param SpecStr: SpecStr to be converted
+#
+def ConvertSpec(SpecStr):
+ RetStr = ''
+ for Char in SpecStr:
+ if Char.isalnum() or Char == '_':
+ RetStr = RetStr + Char
+ else:
+ RetStr = RetStr + '_'
+
+ return RetStr
+
+
+## IsEqualList
+#
+# Judge whether two lists are identical (contain the same items).
+# The rule is that elements in List A are in List B and elements in List B are in List A.
+#
+# @param ListA, ListB  Lists to be judged.
+#
+# @return True  ListA and ListB are identical
+# @return False ListA and ListB are different from each other
+#
+def IsEqualList(ListA, ListB):
+ if ListA == ListB:
+ return True
+
+ for ItemA in ListA:
+ if not ItemA in ListB:
+ return False
+
+ for ItemB in ListB:
+ if not ItemB in ListA:
+ return False
+
+ return True
+
+## ConvertArchList
+#
+# Convert items in ArchList whose first character is lower case.
+# In the UDP spec, Arch is only allowed as: [A-Z]([a-zA-Z0-9])*
+#
+# @param ArchList The ArchList to be converted.
+#
+# @return NewList  The converted ArchList.
+#
+def ConvertArchList(ArchList):
+ NewArchList = []
+ if not ArchList:
+ return NewArchList
+
+ if isinstance(ArchList, list):
+ for Arch in ArchList:
+ Arch = Arch.upper()
+ NewArchList.append(Arch)
+ elif isinstance(ArchList, str):
+ ArchList = ArchList.upper()
+ NewArchList.append(ArchList)
+
+ return NewArchList
+
+## ProcessLineExtender
+#
+# Process the LineExtender of Line in LineList.
+# If one line ends with a line extender, it will be combined with the next line.
+#
+# @param LineList The LineList to be processed.
+#
+# @return NewList  The LineList after processing.
+#
+def ProcessLineExtender(LineList):
+ NewList = []
+ Count = 0
+ while Count < len(LineList):
+ if LineList[Count].strip().endswith("\\") and Count + 1 < len(LineList):
+ NewList.append(LineList[Count].strip()[:-2] + LineList[Count + 1])
+ Count = Count + 1
+ else:
+ NewList.append(LineList[Count])
+
+ Count = Count + 1
+
+ return NewList
+
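+# Usage sketch: ProcessLineExtender(['DEFINE X = 1 \\\n', '+ 2\n']) joins the
+# continued line into ['DEFINE X = 1+ 2\n']; note the slice [:-2] drops the
+# character before the backslash as well.
+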
+## ProcessEdkComment
+#
+# Process EDK style comment in LineList: c style /* */ comment or cpp style // comment
+#
+#
+# @param LineList The LineList to be processed.
+#
+# @return LineList The processed LineList.
+# @return FirstPos Where an EDK comment is first found, -1 if not found
+#
+def ProcessEdkComment(LineList):
+ FindEdkBlockComment = False
+ Count = 0
+ StartPos = -1
+ EndPos = -1
+ FirstPos = -1
+
+ while(Count < len(LineList)):
+ Line = LineList[Count].strip()
+ if Line.startswith("/*"):
+ #
+ # handling c style comment
+ #
+ StartPos = Count
+ while Count < len(LineList):
+ Line = LineList[Count].strip()
+ if Line.endswith("*/"):
+ if (Count == StartPos) and Line.strip() == '/*/':
+ Count = Count + 1
+ continue
+ EndPos = Count
+ FindEdkBlockComment = True
+ break
+ Count = Count + 1
+
+ if FindEdkBlockComment:
+ if FirstPos == -1:
+ FirstPos = StartPos
+ for Index in range(StartPos, EndPos+1):
+ LineList[Index] = ''
+ FindEdkBlockComment = False
+ elif Line.find("//") != -1 and not Line.startswith("#"):
+ #
+ # handling cpp style comment
+ #
+ LineList[Count] = Line.replace("//", '#')
+ if FirstPos == -1:
+ FirstPos = Count
+
+ Count = Count + 1
+
+ return LineList, FirstPos
+
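+# Usage sketch: ProcessEdkComment(['/* legacy */', 'Code // note']) blanks the
+# C style block and rewrites the cpp style comment, returning
+# (['', 'Code # note'], 0).
+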
+## GetLibInstanceInfo
+#
+# Get the information from Library Instance INF file.
+#
+# @param String:  A string starting with # and followed by an INF file path
+# @param WorkSpace: The WorkSpace directory used to combine with the INF file path.
+#
+# @return GUID, Version
+def GetLibInstanceInfo(String, WorkSpace, LineNo):
+
+ FileGuidString = ""
+ VerString = ""
+
+ OriginalString = String
+ String = String.strip()
+ if not String:
+ return None, None
+ #
+ # Remove "#" characters at the beginning
+ #
+ String = GetHelpStringByRemoveHashKey(String)
+ String = String.strip()
+
+ #
+ # Validate file name exist.
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(WorkSpace, String)))
+ if not (ValidFile(FullFileName)):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_FILELIST_EXIST % (String),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LineNo,
+ ExtraData=OriginalString)
+
+ #
+ # Validate file exist/format.
+ #
+ if IsValidPath(String, WorkSpace):
+ IsValidFileFlag = True
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (String),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LineNo,
+ ExtraData=OriginalString)
+ return False
+ if IsValidFileFlag:
+ FileLinesList = []
+
+ try:
+ FInputfile = open(FullFileName, "r")
+ try:
+ FileLinesList = FInputfile.readlines()
+ except BaseException:
+ Logger.Error("InfParser",
+ ToolError.FILE_READ_FAILURE,
+ ST.ERR_FILE_OPEN_FAILURE,
+ File=FullFileName)
+ finally:
+ FInputfile.close()
+ except BaseException:
+ Logger.Error("InfParser",
+ ToolError.FILE_READ_FAILURE,
+ ST.ERR_FILE_OPEN_FAILURE,
+ File=FullFileName)
+
+        ReFileGuidPattern = re.compile(r"^\s*FILE_GUID\s*=.*$")
+        ReVerStringPattern = re.compile(r"^\s*VERSION_STRING\s*=.*$")
+
+ FileLinesList = ProcessLineExtender(FileLinesList)
+
+ for Line in FileLinesList:
+ if ReFileGuidPattern.match(Line):
+ FileGuidString = Line
+ if ReVerStringPattern.match(Line):
+ VerString = Line
+
+ if FileGuidString:
+ FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
+ if VerString:
+ VerString = GetSplitValueList(VerString, '=', 1)[1]
+
+ return FileGuidString, VerString
+
+## GetLocalValue
+#
+# Pick the local value for an INF or DEC file from a list of (Lang, Value) pairs.
+# The rfc1766 language codes are preferred in this order: 'en-x-tianocore',
+# then 'en-US', then 'en', then any code starting with 'en', then an entry
+# without a Lang attribute.
+# If multiple entries exist with the same language code, use the last entry
+# (unless UseFirstValue is True).
+#
+# @param ValueList A list need to be processed.
+# @param UseFirstValue: True to use the first value, False to use the last value
+#
+# @return LocalValue
+def GetLocalValue(ValueList, UseFirstValue=False):
+ Value1 = ''
+ Value2 = ''
+ Value3 = ''
+ Value4 = ''
+ Value5 = ''
+ for (Key, Value) in ValueList:
+ if Key == TAB_LANGUAGE_EN_X:
+ if UseFirstValue:
+ if not Value1:
+ Value1 = Value
+ else:
+ Value1 = Value
+ if Key == TAB_LANGUAGE_EN_US:
+ if UseFirstValue:
+ if not Value2:
+ Value2 = Value
+ else:
+ Value2 = Value
+ if Key == TAB_LANGUAGE_EN:
+ if UseFirstValue:
+ if not Value3:
+ Value3 = Value
+ else:
+ Value3 = Value
+ if Key.startswith(TAB_LANGUAGE_EN):
+ if UseFirstValue:
+ if not Value4:
+ Value4 = Value
+ else:
+ Value4 = Value
+ if Key == '':
+ if UseFirstValue:
+ if not Value5:
+ Value5 = Value
+ else:
+ Value5 = Value
+
+ if Value1:
+ return Value1
+ if Value2:
+ return Value2
+ if Value3:
+ return Value3
+ if Value4:
+ return Value4
+ if Value5:
+ return Value5
+
+ return ''
+
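+# Usage sketch (assuming the TAB_LANGUAGE_* constants are 'en-x-tianocore',
+# 'en-US' and 'en'): GetLocalValue([('en-US', 'Hello'), ('en', 'Hi'),
+# ('en-x-tianocore', 'Howdy')]) is expected to return 'Howdy', the highest
+# priority language entry.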
+
+## GetCharIndexOutStr
+#
+# Get the index of the doubled comment character outside a quoted string
+#
+# @param CommentCharacter:  Comment char, used to ignore comment content
+# @param Line:              The string to be checked
+#
+# @retval Index
+#
+def GetCharIndexOutStr(CommentCharacter, Line):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip()
+
+ #
+ # Check whether comment character is in a string
+ #
+ InString = False
+ for Index in range(0, len(Line)):
+ if Line[Index] == '"':
+ InString = not InString
+ elif Line[Index] == CommentCharacter and InString :
+ pass
+ elif Line[Index] == CommentCharacter and (Index +1) < len(Line) and Line[Index+1] == CommentCharacter \
+ and not InString :
+ return Index
+ return -1
+
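+# Usage sketch: GetCharIndexOutStr('#', 'Value = "A#B" ## tail') is expected to
+# return the index of the doubled '#' outside the quotes (14 for the stripped
+# line), while the '#' inside the quoted string is ignored.
+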
+## ValidateUNIFilePath
+#
+# Check the UNI file path
+#
+# @param Path: The UNI file path
+#
+def ValidateUNIFilePath(Path):
+ Suffix = Path[Path.rfind(TAB_SPLIT):]
+
+ #
+ # Check if the suffix is one of the '.uni', '.UNI', '.Uni'
+ #
+ if Suffix not in TAB_UNI_FILE_SUFFIXS:
+ Logger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNI_FILE_SUFFIX_WRONG,
+ ExtraData=Path)
+
+ #
+ # Check if '..' in the file name(without suffix)
+ #
+ if (TAB_SPLIT + TAB_SPLIT) in Path:
+ Logger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNI_FILE_NAME_INVALID,
+ ExtraData=Path)
+
+ #
+ # Check if the file name is valid according to the DEC and INF specification
+ #
+    Pattern = r'[a-zA-Z0-9_][a-zA-Z0-9_\-\.]*'
+ FileName = Path.replace(Suffix, '')
+ InvalidCh = re.sub(Pattern, '', FileName)
+ if InvalidCh:
+ Logger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ Message=ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID,
+ ExtraData=Path)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ParserValidate.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ParserValidate.py
new file mode 100755
index 00000000..23340583
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/ParserValidate.py
@@ -0,0 +1,727 @@
+## @file ParserValidate.py
+# Functions for parser validation
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+ParserValidate
+'''
+
+import os.path
+import re
+import platform
+
+from Library.DataType import MODULE_LIST
+from Library.DataType import COMPONENT_TYPE_LIST
+from Library.DataType import PCD_USAGE_TYPE_LIST_OF_MODULE
+from Library.DataType import TAB_SPACE_SPLIT
+from Library.StringUtils import GetSplitValueList
+from Library.ExpressionValidate import IsValidBareCString
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## __HexDigit() method
+#
+# Whether the input char is a hex digit
+#
+# @param TempChar: The char to test
+#
+def __HexDigit(TempChar):
+ if (TempChar >= 'a' and TempChar <= 'f') or \
+ (TempChar >= 'A' and TempChar <= 'F') \
+ or (TempChar >= '0' and TempChar <= '9'):
+ return True
+ else:
+ return False
+
+## IsValidHex() method
+#
+# Whether the input string is a hex number.
+#
+# @param HexStr: The string to test
+#
+def IsValidHex(HexStr):
+ if not HexStr.upper().startswith("0X"):
+ return False
+ CharList = [c for c in HexStr[2:] if not __HexDigit(c)]
+ if len(CharList) == 0:
+ return True
+ else:
+ return False
+
+## Judge the input string is valid bool type or not.
+#
+# <TRUE> ::= {"TRUE"} {"true"} {"True"} {"0x1"} {"0x01"}
+# <FALSE> ::= {"FALSE"} {"false"} {"False"} {"0x0"} {"0x00"}
+# <BoolType> ::= {<TRUE>} {<FALSE>}
+#
+# @param BoolString: A string contained the value need to be judged.
+#
+def IsValidBoolType(BoolString):
+ #
+ # Valid True
+ #
+ if BoolString == 'TRUE' or \
+ BoolString == 'True' or \
+ BoolString == 'true' or \
+ BoolString == '0x1' or \
+ BoolString == '0x01':
+ return True
+ #
+ # Valid False
+ #
+ elif BoolString == 'FALSE' or \
+ BoolString == 'False' or \
+ BoolString == 'false' or \
+ BoolString == '0x0' or \
+ BoolString == '0x00':
+ return True
+ #
+ # Invalid bool type
+ #
+ else:
+ return False
+
+## Is Valid Module Type List or not
+#
+# @param ModuleTypeList: A list containing ModuleType strings to be
+# judged.
+#
+def IsValidInfMoudleTypeList(ModuleTypeList):
+    #
+    # Every entry must be a valid module type
+    #
+    for ModuleType in ModuleTypeList:
+        if not IsValidInfMoudleType(ModuleType):
+            return False
+    return True
+
+## Is Valid Module Type or not
+#
+# @param ModuleType: A string containing the ModuleType to be judged.
+#
+def IsValidInfMoudleType(ModuleType):
+ if ModuleType in MODULE_LIST:
+ return True
+ else:
+ return False
+
+## Is Valid Component Type or not
+#
+# @param ComponentType: A string containing the ComponentType to be judged.
+#
+def IsValidInfComponentType(ComponentType):
+ if ComponentType.upper() in COMPONENT_TYPE_LIST:
+ return True
+ else:
+ return False
+
+
+## Is valid Tool Family or not
+#
+# @param ToolFamily: A string containing the Tool Family to be judged.
+# Family := [A-Z]([a-zA-Z0-9])*
+#
+def IsValidToolFamily(ToolFamily):
+ ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
+ if ReIsValidFamily.match(ToolFamily) is None:
+ return False
+ return True
+
+## Is valid Tool TagName or not
+#
+# Sample TagNames are MYTOOLS and VS2005.
+#
+# @param TagName: A string containing the Tool TagName to be judged.
+#
+def IsValidToolTagName(TagName):
+ if TagName.strip() == '':
+ return True
+ if TagName.strip() == '*':
+ return True
+ if not IsValidWord(TagName):
+ return False
+ return True
+
+## Is valid arch or not
+#
+# <OA>   ::= (a-zA-Z)(A-Za-z0-9){0,}
+# <arch> ::= {"IA32"} {"X64"} {"IPF"} {"EBC"} {<OA>}
+#            {"common"}
+#
+# @param Arch: The arch string to be validated
+#
+def IsValidArch(Arch):
+ if Arch == 'common':
+ return True
+ ReIsValidArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
+ if ReIsValidArch.match(Arch) is None:
+ return False
+ return True
+
+## Is valid family or not
+#
+# <Family> ::= {"MSFT"} {"GCC"} {"INTEL"} {<Usr>} {"*"}
+# <Usr> ::= [A-Z][A-Za-z0-9]{0,}
+#
+# @param Family: The family string to be validated
+#
+def IsValidFamily(Family):
+ Family = Family.strip()
+ if Family == '*':
+ return True
+
+ if Family == '':
+ return True
+
+ ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
+ if ReIsValidFamily.match(Family) is None:
+ return False
+ return True
+
+## Is valid build option name or not
+#
+# @param BuildOptionName: The BuildOptionName string to be validated
+#
+def IsValidBuildOptionName(BuildOptionName):
+ if not BuildOptionName:
+ return False
+
+ ToolOptionList = GetSplitValueList(BuildOptionName, '_', 4)
+
+ if len(ToolOptionList) != 5:
+ return False
+
+ ReIsValidBuildOption1 = re.compile(r"^\s*(\*)|([A-Z][a-zA-Z0-9]*)$")
+ ReIsValidBuildOption2 = re.compile(r"^\s*(\*)|([a-zA-Z][a-zA-Z0-9]*)$")
+
+ if ReIsValidBuildOption1.match(ToolOptionList[0]) is None:
+ return False
+
+ if ReIsValidBuildOption1.match(ToolOptionList[1]) is None:
+ return False
+
+ if ReIsValidBuildOption2.match(ToolOptionList[2]) is None:
+ return False
+
+ if ToolOptionList[3] == "*" and ToolOptionList[4] not in ['FAMILY', 'DLL', 'DPATH']:
+ return False
+
+ return True
+
+## IsValidToken
+#
+# Check if pattern string matches total token
+#
+# @param ReString: regular string
+# @param Token: Token to be matched
+#
+def IsValidToken(ReString, Token):
+ Match = re.compile(ReString).match(Token)
+ return Match and Match.start() == 0 and Match.end() == len(Token)
+
+## IsValidPath
+#
+# Check if path exist
+#
+# @param Path: Absolute path or relative path to be checked
+# @param Root: Root path
+#
+def IsValidPath(Path, Root):
+ Path = Path.strip()
+ OrigPath = Path.replace('\\', '/')
+
+ Path = os.path.normpath(Path).replace('\\', '/')
+ Root = os.path.normpath(Root).replace('\\', '/')
+ FullPath = mws.join(Root, Path)
+
+ if not os.path.exists(FullPath):
+ return False
+
+ #
+ # If Path is absolute path.
+ # It should be in Root.
+ #
+ if os.path.isabs(Path):
+ if not Path.startswith(Root):
+ return False
+ return True
+
+ #
+ # Check illegal character
+ #
+ for Rel in ['/', './', '../']:
+ if OrigPath.startswith(Rel):
+ return False
+ for Rel in ['//', '/./', '/../']:
+ if Rel in OrigPath:
+ return False
+ for Rel in ['/.', '/..', '/']:
+ if OrigPath.endswith(Rel):
+ return False
+
+ Path = Path.rstrip('/')
+
+ #
+ # Check relative path
+ #
+ for Word in Path.split('/'):
+ if not IsValidWord(Word):
+ return False
+
+ return True
+
+## IsValidInstallPath
+#
+# Check if an install path is valid or not.
+#
+# Absolute path or path starts with '.' or path contains '..' are invalid.
+#
+# @param Path: path to be checked
+#
+def IsValidInstallPath(Path):
+ if platform.platform().find("Windows") >= 0:
+ if os.path.isabs(Path):
+ return False
+ else:
+ if Path[1:2] == ':':
+ return False
+ if os.path.isabs(Path):
+ return False
+ if Path.startswith('.'):
+ return False
+
+ if Path.find('..') != -1:
+ return False
+
+ return True
+
+
+## IsValidCFormatGuid
+#
+# Check if GUID format has the form of {8,4,4,{2,2,2,2,2,2,2,2}}
+#
+# @param Guid: Guid to be checked
+#
+def IsValidCFormatGuid(Guid):
+ #
+ # Valid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
+ # 0xaf, 0x48, 0xce }}
+ # Invalid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
+ # 0xaf, 0x48, 0xce }} 0x123
+ # Invalid: { 0xf0b1 1735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
+ # 0xaf, 0x48, 0xce }}
+ #
+ List = ['{', 10, ',', 6, ',', 6, ',{', 4, ',', 4, ',', 4,
+ ',', 4, ',', 4, ',', 4, ',', 4, ',', 4, '}}']
+ Index = 0
+ Value = ''
+ SepValue = ''
+ for Char in Guid:
+ if Char not in '{},\t ':
+ Value += Char
+ continue
+ if Value:
+ try:
+ #
+ # Index may out of bound
+ #
+ if not SepValue or SepValue != List[Index]:
+ return False
+ Index += 1
+ SepValue = ''
+
+ if not Value.startswith('0x') and not Value.startswith('0X'):
+ return False
+
+ #
+ # Index may out of bound
+ #
+ if not isinstance(List[Index], type(1)) or \
+ len(Value) > List[Index] or len(Value) < 3:
+ return False
+
+ #
+ # Check if string can be converted to integer
+ # Throw exception if not
+ #
+ int(Value, 16)
+ except BaseException:
+ #
+ # Exception caught means invalid format
+ #
+ return False
+ Value = ''
+ Index += 1
+ if Char in '{},':
+ SepValue += Char
+
+ return SepValue == '}}' and Value == ''
+
+## IsValidPcdType
+#
+# Check whether the PCD type is valid
+#
+# @param PcdTypeString: The PcdType string to be checked.
+#
+def IsValidPcdType(PcdTypeString):
+ if PcdTypeString.upper() in PCD_USAGE_TYPE_LIST_OF_MODULE:
+ return True
+ else:
+ return False
+
+## IsValidWord
+#
+# Check whether the word is valid.
+# <Word>   ::=  (a-zA-Z0-9_)(a-zA-Z0-9_\-.){0,}
+#               Alphanumeric characters with optional dash "-", underscore "_"
+#               and/or period "." characters (consecutive periods are not
+#               allowed). No whitespace characters are permitted.
+#
+# @param Word: The word string to be checked.
+#
+def IsValidWord(Word):
+ if not Word:
+ return False
+ #
+ # The first char should be alpha, _ or Digit.
+ #
+ if not Word[0].isalnum() and \
+ not Word[0] == '_' and \
+ not Word[0].isdigit():
+ return False
+
+ LastChar = ''
+ for Char in Word[1:]:
+ if (not Char.isalpha()) and \
+ (not Char.isdigit()) and \
+ Char != '-' and \
+ Char != '_' and \
+ Char != '.':
+ return False
+ if Char == '.' and LastChar == '.':
+ return False
+ LastChar = Char
+
+ return True
+
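+# Usage sketch: IsValidWord('Pcd_Name-1') -> True; IsValidWord('a..b') -> False
+# (consecutive periods); IsValidWord(' lead') -> False (leading whitespace).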
+
+## IsValidSimpleWord
+#
+# Check whether the SimpleWord is valid.
+# <SimpleWord> ::= (a-zA-Z0-9)(a-zA-Z0-9_-){0,}
+# A word that cannot contain a period character.
+#
+# @param Word: The word string to be checked.
+#
+def IsValidSimpleWord(Word):
+ ReIsValidSimpleWord = \
+ re.compile(r"^[0-9A-Za-z][0-9A-Za-z\-_]*$", re.DOTALL)
+ Word = Word.strip()
+ if not Word:
+ return False
+
+ if not ReIsValidSimpleWord.match(Word):
+ return False
+
+ return True
+
+## IsValidDecVersion
+#
+# Check whether the decimal version is valid.
+# <DecVersion> ::= (0-9){1,} ["." (0-9){1,}]
+#
+# @param Word: The word string to be checked.
+#
+def IsValidDecVersion(Word):
+ if Word.find('.') > -1:
+ ReIsValidDecVersion = re.compile(r"[0-9]+\.?[0-9]+$")
+ else:
+ ReIsValidDecVersion = re.compile(r"[0-9]+$")
+ if ReIsValidDecVersion.match(Word) is None:
+ return False
+ return True
+
+## IsValidHexVersion
+#
+# Check whether the hex version is valid.
+# <HexVersion> ::= "0x" <Major> <Minor>
+# <Major> ::= <HexDigit>{4}
+# <Minor> ::= <HexDigit>{4}
+#
+# @param Word: The word string to be checked.
+#
+def IsValidHexVersion(Word):
+ ReIsValidHexVersion = re.compile(r"[0][xX][0-9A-Fa-f]{8}$", re.DOTALL)
+ if ReIsValidHexVersion.match(Word) is None:
+ return False
+
+ return True
+
+## IsValidBuildNumber
+#
+# Check whether the BUILD_NUMBER is valid.
+# ["BUILD_NUMBER" "=" <Integer>{1,4} <EOL>]
+#
+# @param Word: The BUILD_NUMBER string to be checked.
+#
+def IsValidBuildNumber(Word):
+    ReIsValidBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
+    if ReIsValidBuildNumber.match(Word) is None:
+ return False
+
+ return True
+
+## IsValidDepex
+#
+# Check whether the Depex is valid.
+#
+# @param Word: The Depex string to be checked.
+#
+def IsValidDepex(Word):
+ Index = Word.upper().find("PUSH")
+ if Index > -1:
+ return IsValidCFormatGuid(Word[Index+4:].strip())
+
+ ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
+ if ReIsValidCName.match(Word) is None:
+ return False
+
+ return True
+
+## IsValidNormalizedString
+#
+# Check
+# <NormalizedString> ::= <DblQuote> [{<Word>} {<Space>}]{1,} <DblQuote>
+# <Space> ::= 0x20
+#
+# @param String: string to be checked
+#
+def IsValidNormalizedString(String):
+ if String == '':
+ return True
+
+ for Char in String:
+ if Char == '\t':
+ return False
+
+ StringList = GetSplitValueList(String, TAB_SPACE_SPLIT)
+
+ for Item in StringList:
+ if not Item:
+ continue
+ if not IsValidWord(Item):
+ return False
+
+ return True
+
+## IsValidIdString
+#
+# Check whether the IdString is valid.
+#
+# @param String: The IdString to be checked.
+#
+def IsValidIdString(String):
+ if IsValidSimpleWord(String.strip()):
+ return True
+
+ if String.strip().startswith('"') and \
+ String.strip().endswith('"'):
+ String = String[1:-1]
+ if String.strip() == "":
+ return True
+ if IsValidNormalizedString(String):
+ return True
+
+ return False
+
+## IsValidVersionString
+#
+# Check whether the VersionString is valid.
+# <AsciiString> ::= [ [<WhiteSpace>]{0,} [<AsciiChars>]{0,} ] {0,}
+# <WhiteSpace> ::= {<Tab>} {<Space>}
+# <Tab> ::= 0x09
+# <Space> ::= 0x20
+# <AsciiChars> ::= (0x21 - 0x7E)
+#
+# @param VersionString: The VersionString to be checked.
+#
+def IsValidVersionString(VersionString):
+ VersionString = VersionString.strip()
+ for Char in VersionString:
+        if not (0x21 <= ord(Char) <= 0x7E):
+ return False
+
+ return True
+
+## IsValidPcdValue
+#
+# Check whether the PcdValue is valid.
+#
+# @param PcdValue: The PcdValue to be checked.
+#
+def IsValidPcdValue(PcdValue):
+ for Char in PcdValue:
+ if Char == '\n' or Char == '\t' or Char == '\f':
+ return False
+
+ #
+ # <Boolean>
+ #
+ if IsValidFeatureFlagExp(PcdValue, True)[0]:
+ return True
+
+ #
+ # <Number> ::= {<Integer>} {<HexNumber>}
+ # <Integer> ::= {(0-9)} {(1-9)(0-9){1,}}
+ # <HexNumber> ::= "0x" <HexDigit>{1,}
+ # <HexDigit> ::= (a-fA-F0-9)
+ #
+ if IsValidHex(PcdValue):
+ return True
+
+ ReIsValidIntegerSingle = re.compile(r"^\s*[0-9]\s*$", re.DOTALL)
+ if ReIsValidIntegerSingle.match(PcdValue) is not None:
+ return True
+
+ ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
+ if ReIsValidIntegerMulti.match(PcdValue) is not None:
+ return True
+
+ #
+ # <StringVal> ::= {<StringType>} {<Array>} {"$(" <MACRO> ")"}
+ # <StringType> ::= {<UnicodeString>} {<CString>}
+ #
+ ReIsValidStringType = re.compile(r"^\s*[\"L].*[\"]\s*$")
+ if ReIsValidStringType.match(PcdValue):
+ IsTrue = False
+ if PcdValue.strip().startswith('L\"'):
+ StringValue = PcdValue.strip().lstrip('L\"').rstrip('\"')
+ if IsValidBareCString(StringValue):
+ IsTrue = True
+ elif PcdValue.strip().startswith('\"'):
+ StringValue = PcdValue.strip().lstrip('\"').rstrip('\"')
+ if IsValidBareCString(StringValue):
+ IsTrue = True
+ if IsTrue:
+ return IsTrue
+
+ #
+ # <Array> ::= {<CArray>} {<NList>} {<CFormatGUID>}
+ # <CArray> ::= "{" [<NList>] <CArray>{0,} "}"
+ # <NList> ::= <HexByte> ["," <HexByte>]{0,}
+ # <HexDigit> ::= (a-fA-F0-9)
+ # <HexByte> ::= "0x" <HexDigit>{1,2}
+ #
+ if IsValidCFormatGuid(PcdValue):
+ return True
+
+ ReIsValidByteHex = re.compile(r"^\s*0x[0-9a-fA-F]{1,2}\s*$", re.DOTALL)
+ if PcdValue.strip().startswith('{') and PcdValue.strip().endswith('}') :
+ StringValue = PcdValue.strip().lstrip('{').rstrip('}')
+ ValueList = StringValue.split(',')
+ AllValidFlag = True
+ for ValueItem in ValueList:
+ if not ReIsValidByteHex.match(ValueItem.strip()):
+ AllValidFlag = False
+
+ if AllValidFlag:
+ return True
+
+ #
+ # NList
+ #
+ AllValidFlag = True
+ ValueList = PcdValue.split(',')
+ for ValueItem in ValueList:
+ if not ReIsValidByteHex.match(ValueItem.strip()):
+ AllValidFlag = False
+
+ if AllValidFlag:
+ return True
+
+ return False
+
+## IsValidCVariableName
+#
+# Check whether the C variable name is valid.
+#
+# @param CName: The C variable name to be checked.
+#
+def IsValidCVariableName(CName):
+ ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
+ if ReIsValidCName.match(CName) is None:
+ return False
+
+ return True
+
+## IsValidIdentifier
+#
+# <Identifier> ::= <NonDigit> <Chars>{0,}
+# <Chars> ::= (a-zA-Z0-9_)
+# <NonDigit> ::= (a-zA-Z_)
+#
+# @param Ident: identifier to be checked
+#
+def IsValidIdentifier(Ident):
+ ReIdent = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
+ if ReIdent.match(Ident) is None:
+ return False
+
+ return True
+
+## IsValidDecVersionVal
+#
+# {(0-9){1,} "." (0-99)}
+#
+# @param Ver: version to be checked
+#
+def IsValidDecVersionVal(Ver):
+ ReVersion = re.compile(r"[0-9]+(\.[0-9]{1,2})$")
+
+ if ReVersion.match(Ver) is None:
+ return False
+
+ return True
+
+
+## IsValidLibName
+#
+# (A-Z)(a-zA-Z0-9){0,}, and must not be "NULL"
+#
+def IsValidLibName(LibName):
+ if LibName == 'NULL':
+ return False
+ ReLibName = re.compile("^[A-Z]+[a-zA-Z0-9]*$")
+ if not ReLibName.match(LibName):
+ return False
+
+ return True
+
+## IsValidUserId
+#
+# <UserId> ::= (a-zA-Z)(a-zA-Z0-9_.){0,}
+# Words that contain period "." must be encapsulated in double quotation marks.
+#
+def IsValidUserId(UserId):
+ UserId = UserId.strip()
+ Quoted = False
+ if UserId.startswith('"') and UserId.endswith('"'):
+ Quoted = True
+ UserId = UserId[1:-1]
+ if not UserId or not UserId[0].isalpha():
+ return False
+ for Char in UserId[1:]:
+ if not Char.isalnum() and not Char in '_.':
+ return False
+ if Char == '.' and not Quoted:
+ return False
+ return True
+
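+# Usage sketch: IsValidUserId('TianoCore') -> True;
+# IsValidUserId('"Edk2.Stable"') -> True (quoted, so the period is allowed);
+# IsValidUserId('Edk2.Stable') -> False.
+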
+## CheckUTF16FileHeader
+#
+# Check if a UTF16-LE file has a BOM header
+#
+def CheckUTF16FileHeader(File):
+    with open(File, 'rb') as FileObj:
+        FileIn = FileObj.read(2)
+ if FileIn != b'\xff\xfe':
+ return False
+
+ return True
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Parsing.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Parsing.py
new file mode 100755
index 00000000..802f81e6
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Parsing.py
@@ -0,0 +1,1015 @@
+## @file
+# This file is used to define common parsing related functions used in parsing
+# INF/DEC/DSC process
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Parsing
+'''
+from __future__ import absolute_import
+
+##
+# Import Modules
+#
+import os.path
+import re
+
+from Library.StringUtils import RaiseParserError
+from Library.StringUtils import GetSplitValueList
+from Library.StringUtils import CheckFileType
+from Library.StringUtils import CheckFileExist
+from Library.StringUtils import CleanString
+from Library.StringUtils import NormPath
+
+from Logger.ToolError import FILE_NOT_FOUND
+from Logger.ToolError import FatalError
+from Logger.ToolError import FORMAT_INVALID
+
+from Library import DataType
+
+from Library.Misc import GuidStructureStringToGuidString
+from Library.Misc import CheckGuidRegFormat
+from Logger import StringTable as ST
+import Logger.Log as Logger
+
+from Parser.DecParser import Dec
+from . import GlobalData
+
+gPKG_INFO_DICT = {}
+
+## GetBuildOption
+#
+# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
+# Return (Family, ToolFlag, Flag)
+#
+# @param String: String with BuildOption statement
+# @param File: The file which defines build option, used in error report
+#
+def GetBuildOption(String, File, LineNo= -1):
+ (Family, ToolChain, Flag) = ('', '', '')
+ if String.find(DataType.TAB_EQUAL_SPLIT) < 0:
+ RaiseParserError(String, 'BuildOptions', File, \
+ '[<Family>:]<ToolFlag>=Flag', LineNo)
+ else:
+ List = GetSplitValueList(String, DataType.TAB_EQUAL_SPLIT, MaxSplit=1)
+ if List[0].find(':') > -1:
+ Family = List[0][ : List[0].find(':')].strip()
+ ToolChain = List[0][List[0].find(':') + 1 : ].strip()
+ else:
+ ToolChain = List[0].strip()
+ Flag = List[1].strip()
+ return (Family, ToolChain, Flag)
+
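+# Usage sketch (assuming TAB_EQUAL_SPLIT is '='):
+# GetBuildOption('MSFT:*_*_*_CC_FLAGS = /D TEST', 'Pkg.dsc') is expected to
+# return ('MSFT', '*_*_*_CC_FLAGS', '/D TEST').
+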
+## Get Library Class
+#
+# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
+#
+# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
+# @param ContainerFile: The file which describes the library class, used for
+# error report
+#
+def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo= -1):
+ List = GetSplitValueList(Item[0])
+ SupMod = DataType.SUP_MODULE_LIST_STRING
+ if len(List) != 2:
+ RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, \
+ '<LibraryClassKeyWord>|<LibraryInstance>')
+ else:
+ CheckFileType(List[1], '.Inf', ContainerFile, \
+ 'library class instance', Item[0], LineNo)
+ CheckFileExist(WorkspaceDir, List[1], ContainerFile, \
+ 'LibraryClasses', Item[0], LineNo)
+ if Item[1] != '':
+ SupMod = Item[1]
+
+ return (List[0], List[1], SupMod)
+
+## Get Library Class
+#
+# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>]
+# [|<TokenSpaceGuidCName>.<PcdCName>]
+#
+# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
+# @param ContainerFile: The file which describes the library class, used for
+# error report
+#
+def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
+ ItemList = GetSplitValueList((Item[0] + DataType.TAB_VALUE_SPLIT * 2))
+ SupMod = DataType.SUP_MODULE_LIST_STRING
+
+ if len(ItemList) > 5:
+ RaiseParserError\
+ (Item[0], 'LibraryClasses', ContainerFile, \
+ '<LibraryClassKeyWord>[|<LibraryInstance>]\
+ [|<TokenSpaceGuidCName>.<PcdCName>]')
+ else:
+ CheckFileType(ItemList[1], '.Inf', ContainerFile, 'LibraryClasses', \
+ Item[0], LineNo)
+ CheckFileExist(WorkspaceDir, ItemList[1], ContainerFile, \
+ 'LibraryClasses', Item[0], LineNo)
+ if ItemList[2] != '':
+ CheckPcdTokenInfo(ItemList[2], 'LibraryClasses', \
+ ContainerFile, LineNo)
+ if Item[1] != '':
+ SupMod = Item[1]
+
+ return (ItemList[0], ItemList[1], ItemList[2], SupMod)
+
+## CheckPcdTokenInfo
+#
+# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
+#
+# @param TokenInfoString: String to be checked
+# @param Section: Used for error report
+# @param File: Used for error report
+#
+def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1):
+ Format = '<TokenSpaceGuidCName>.<PcdCName>'
+ if TokenInfoString != '' and TokenInfoString is not None:
+ TokenInfoList = GetSplitValueList(TokenInfoString, DataType.TAB_SPLIT)
+ if len(TokenInfoList) == 2:
+ return True
+
+ RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
+
+## Get Pcd
+#
+# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>
+# [|<Type>|<MaximumDatumSize>]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
+# <Value>[|<Type>|<MaximumDatumSize>]
+# @param ContainerFile: The file which describes the pcd, used for error
+# report
+#
+def GetPcd(Item, Type, ContainerFile, LineNo= -1):
+ TokenGuid, TokenName, Value, MaximumDatumSize, Token = '', '', '', '', ''
+ List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
+
+ if len(List) < 4 or len(List) > 6:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
+ '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>\
+ [|<Type>|<MaximumDatumSize>]', LineNo)
+ else:
+ Value = List[1]
+ MaximumDatumSize = List[2]
+ Token = List[3]
+
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
+
+## Get FeatureFlagPcd
+#
+# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
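+# For example: 'gSampleTokenSpaceGuid.PcdSampleFlag|TRUE' ->
+# ('PcdSampleFlag', 'gSampleTokenSpaceGuid', 'TRUE', Type)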
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>
+# .<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error
+# report
+#
+def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo= -1):
+ TokenGuid, TokenName, Value = '', '', ''
+ List = GetSplitValueList(Item)
+ if len(List) != 2:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
+ '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', \
+ LineNo)
+ else:
+ Value = List[1]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, Value, Type)
+
+## Get DynamicDefaultPcd
+#
+# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>
+# |<Value>[|<DatumType>[|<MaxDatumSize>]]
+#
+# @param Item:           String as <PcdTokenSpaceGuidCName>.<TokenCName>|
+#                        <Value>[|<DatumType>[|<MaxDatumSize>]]
+# @param ContainerFile: The file which describes the pcd, used for error
+# report
+#
+def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo= -1):
+    TokenGuid, TokenName, Value, DatumType, MaxDatumSize = '', '', '', '', ''
+ List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
+ if len(List) < 4 or len(List) > 8:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
+ '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>\
+                         [|<DatumType>[|<MaxDatumSize>]]', LineNo)
+ else:
+ Value = List[1]
+        DatumType = List[2]
+ MaxDatumSize = List[3]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+    return (TokenName, TokenGuid, Value, DatumType, MaxDatumSize, Type)
+
+## Get DynamicHiiPcd
+#
+# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|
+# <VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
+#
+# @param Item:           String as <PcdTokenSpaceGuidCName>.<TokenCName>|
+#                        <String>|<VariableGuidCName>|<VariableOffset>
+#                        [|<DefaultValue>[|<MaximumDatumSize>]]
+# @param ContainerFile: The file which describes the pcd, used for error
+# report
+#
+def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo= -1):
+ TokenGuid, TokenName, List1, List2, List3, List4, List5 = \
+ '', '', '', '', '', '', ''
+ List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
+ if len(List) < 6 or len(List) > 8:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
+ '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|\
+ <VariableGuidCName>|<VariableOffset>[|<DefaultValue>\
+ [|<MaximumDatumSize>]]', LineNo)
+ else:
+ List1, List2, List3, List4, List5 = \
+ List[1], List[2], List[3], List[4], List[5]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, List1, List2, List3, List4, List5, Type)
+
+## Get DynamicVpdPcd
+#
+# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|
+# <VpdOffset>[|<MaximumDatumSize>]
+#
+# @param Item:           String as <PcdTokenSpaceGuidCName>.<TokenCName>
+#                        |<VpdOffset>[|<MaximumDatumSize>]
+# @param ContainerFile: The file which describes the pcd, used for error
+# report
+#
+def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo= -1):
+ TokenGuid, TokenName, List1, List2 = '', '', '', ''
+ List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
+ if len(List) < 3 or len(List) > 4:
+ RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
+ '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>\
+ [|<MaximumDatumSize>]', LineNo)
+ else:
+ List1, List2 = List[1], List[2]
+ if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
+ (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+
+ return (TokenName, TokenGuid, List1, List2, Type)
+
+## GetComponent
+#
+# Parse block of the components defined in dsc file
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3],
+# [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
+#
+# @param Lines: The content to be parsed
+# @param KeyValues: To store data after parsing
+#
+def GetComponent(Lines, KeyValues):
+ (FindBlock, FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, FindPcdsDynamic, \
+ FindPcdsDynamicEx) = (False, False, False, False, False, False, False, \
+ False)
+ ListItem = None
+ LibraryClassItem = []
+ BuildOption = []
+ Pcd = []
+
+ for Line in Lines:
+ Line = Line[0]
+ #
+        # Ignore !include and DEFINE statements
+ #
+ if Line.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1 or \
+ Line.upper().find(DataType.TAB_DEFINE + ' ') > -1:
+ continue
+
+        if not FindBlock:
+ ListItem = Line
+ #
+ # find '{' at line tail
+ #
+ if Line.endswith('{'):
+ FindBlock = True
+ ListItem = CleanString(Line.rsplit('{', 1)[0], \
+ DataType.TAB_COMMENT_SPLIT)
+
+ #
+ # Parse a block content
+ #
+ if FindBlock:
+ if Line.find('<LibraryClasses>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (True, False, False, False, False, False, False)
+ continue
+ if Line.find('<BuildOptions>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, True, False, False, False, False, False)
+ continue
+ if Line.find('<PcdsFeatureFlag>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, True, False, False, False, False)
+ continue
+ if Line.find('<PcdsPatchableInModule>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, True, False, False, False)
+ continue
+ if Line.find('<PcdsFixedAtBuild>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, True, False, False)
+ continue
+ if Line.find('<PcdsDynamic>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, False, True, False)
+ continue
+ if Line.find('<PcdsDynamicEx>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, False, False, True)
+ continue
+ if Line.endswith('}'):
+ #
+ # find '}' at line tail
+ #
+ KeyValues.append([ListItem, LibraryClassItem, \
+ BuildOption, Pcd])
+ (FindBlock, FindLibraryClass, FindBuildOption, \
+ FindPcdsFeatureFlag, FindPcdsPatchableInModule, \
+ FindPcdsFixedAtBuild, FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, False, False, False, False)
+ LibraryClassItem, BuildOption, Pcd = [], [], []
+ continue
+
+ if FindBlock:
+ if FindLibraryClass:
+ LibraryClassItem.append(Line)
+ elif FindBuildOption:
+ BuildOption.append(Line)
+ elif FindPcdsFeatureFlag:
+ Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
+ elif FindPcdsPatchableInModule:
+ Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
+ elif FindPcdsFixedAtBuild:
+ Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
+ elif FindPcdsDynamic:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
+ elif FindPcdsDynamicEx:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
+ else:
+ KeyValues.append([ListItem, [], [], []])
+
+ return True
+
+## GetExec
+#
+# Parse a string with format "InfFilename [EXEC = ExecFilename]"
+# Return (InfFilename, ExecFilename)
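+# For example: 'Sample.inf EXEC = Sample.exe' -> ('Sample.inf', 'Sample.exe')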
+#
+# @param String: String with EXEC statement
+#
+def GetExec(String):
+ InfFilename = ''
+ ExecFilename = ''
+ if String.find('EXEC') > -1:
+ InfFilename = String[ : String.find('EXEC')].strip()
+        #
+        # Drop the optional '=' between 'EXEC' and the file name
+        #
+        ExecFilename = String[String.find('EXEC') + len('EXEC') : ].strip().lstrip('=').strip()
+ else:
+ InfFilename = String.strip()
+
+ return (InfFilename, ExecFilename)
+
+## GetComponents
+#
+# Parse block of the components defined in dsc file
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3],
+# [pcd1, pcd2, pcd3]], ...]
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get component successfully
+#
+def GetComponents(Lines, KeyValues, CommentCharacter):
+ if Lines.find(DataType.TAB_SECTION_END) > -1:
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ (FindBlock, FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, FindPcdsDynamic, \
+ FindPcdsDynamicEx) = \
+ (False, False, False, False, False, False, False, False)
+ ListItem = None
+ LibraryClassItem = []
+ BuildOption = []
+ Pcd = []
+
+ LineList = Lines.split('\n')
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line is None or Line == '':
+ continue
+
+        if not FindBlock:
+ ListItem = Line
+ #
+ # find '{' at line tail
+ #
+ if Line.endswith('{'):
+ FindBlock = True
+ ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)
+
+ #
+ # Parse a block content
+ #
+ if FindBlock:
+ if Line.find('<LibraryClasses>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (True, False, False, False, False, False, False)
+ continue
+ if Line.find('<BuildOptions>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, True, False, False, False, False, False)
+ continue
+ if Line.find('<PcdsFeatureFlag>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, True, False, False, False, False)
+ continue
+ if Line.find('<PcdsPatchableInModule>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, True, False, False, False)
+ continue
+ if Line.find('<PcdsFixedAtBuild>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, True, False, False)
+ continue
+ if Line.find('<PcdsDynamic>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, False, True, False)
+ continue
+ if Line.find('<PcdsDynamicEx>') != -1:
+ (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
+ FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
+ FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, False, False, True)
+ continue
+ if Line.endswith('}'):
+ #
+ # find '}' at line tail
+ #
+ KeyValues.append([ListItem, LibraryClassItem, BuildOption, \
+ Pcd])
+ (FindBlock, FindLibraryClass, FindBuildOption, \
+ FindPcdsFeatureFlag, FindPcdsPatchableInModule, \
+ FindPcdsFixedAtBuild, FindPcdsDynamic, FindPcdsDynamicEx) = \
+ (False, False, False, False, False, False, False, False)
+ LibraryClassItem, BuildOption, Pcd = [], [], []
+ continue
+
+ if FindBlock:
+ if FindLibraryClass:
+ LibraryClassItem.append(Line)
+ elif FindBuildOption:
+ BuildOption.append(Line)
+ elif FindPcdsFeatureFlag:
+ Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
+ elif FindPcdsPatchableInModule:
+ Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
+ elif FindPcdsFixedAtBuild:
+ Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
+ elif FindPcdsDynamic:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
+ elif FindPcdsDynamicEx:
+ Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
+ else:
+ KeyValues.append([ListItem, [], [], []])
+
+ return True
+
+## Get Source
+#
+# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>
+# [|<PcdFeatureFlag>]]]]
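+# For example: 'Driver.c|MSFT' (with Driver.c present under FileRelativePath)
+# -> ('Driver.c', 'MSFT', '', '', '')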
+#
+# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>
+# [|<PcdFeatureFlag>]]]]
+# @param ContainerFile:      The file which describes the source file, used
+# for error report
+#
+def GetSource(Item, ContainerFile, FileRelativePath, LineNo= -1):
+ ItemNew = Item + DataType.TAB_VALUE_SPLIT * 4
+ List = GetSplitValueList(ItemNew)
+ if len(List) < 5 or len(List) > 9:
+ RaiseParserError(Item, 'Sources', ContainerFile, \
+ '<Filename>[|<Family>[|<TagName>[|<ToolCode>\
+ [|<PcdFeatureFlag>]]]]', LineNo)
+ List[0] = NormPath(List[0])
+ CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Sources', \
+ Item, LineNo)
+ if List[4] != '':
+ CheckPcdTokenInfo(List[4], 'Sources', ContainerFile, LineNo)
+
+ return (List[0], List[1], List[2], List[3], List[4])
+
+## Get Binary
+#
+# Get Binary of Inf as <FileType>|<Filename>[|<Target>
+# [|<TokenSpaceGuidCName>.<PcdCName>]]
+#
+# @param Item:           String as <FileType>|<Filename>[|<Target>
+#                        [|<TokenSpaceGuidCName>.<PcdCName>]]
+# @param ContainerFile:  The file which describes the binary file,
+#                        used for error report
+#
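+# For example: 'BIN|Sample.bin' -> ('BIN', 'Sample.bin', '', '')
+#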
+def GetBinary(Item, ContainerFile, LineNo= -1):
+ ItemNew = Item + DataType.TAB_VALUE_SPLIT
+ List = GetSplitValueList(ItemNew)
+ if len(List) < 3 or len(List) > 5:
+ RaiseParserError(Item, 'Binaries', ContainerFile, \
+ "<FileType>|<Filename>[|<Target>\
+ [|<TokenSpaceGuidCName>.<PcdCName>]]", LineNo)
+
+ if len(List) >= 4:
+ if List[3] != '':
+ CheckPcdTokenInfo(List[3], 'Binaries', ContainerFile, LineNo)
+ return (List[0], List[1], List[2], List[3])
+ elif len(List) == 3:
+ return (List[0], List[1], List[2], '')
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
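+# For example: 'gEfiSampleProtocolGuid|TRUE' -> ('gEfiSampleProtocolGuid', 'TRUE')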
+#
+# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
+#
+def GetGuidsProtocolsPpisOfInf(Item):
+ ItemNew = Item + DataType.TAB_VALUE_SPLIT
+ List = GetSplitValueList(ItemNew)
+ return (List[0], List[1])
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
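+# For example: 'gSampleGuid = 11111111-2222-3333-4444-555555555555' ->
+# ('gSampleGuid', '11111111-2222-3333-4444-555555555555')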
+#
+# @param Item: String as <GuidCName>=<GuidValue>
+# @param Type: Type of parsing string
+# @param ContainerFile:  The file which describes the Guid/Protocol/Ppi,
+# used for error report
+#
+def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo= -1):
+ List = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
+ if len(List) != 2:
+ RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', \
+ LineNo)
+ #
+ #convert C-Format Guid to Register Format
+ #
+ if List[1][0] == '{' and List[1][-1] == '}':
+ RegisterFormatGuid = GuidStructureStringToGuidString(List[1])
+ if RegisterFormatGuid == '':
+ RaiseParserError(Item, Type, ContainerFile, \
+ 'CFormat or RegisterFormat', LineNo)
+ else:
+ if CheckGuidRegFormat(List[1]):
+ RegisterFormatGuid = List[1]
+ else:
+ RaiseParserError(Item, Type, ContainerFile, \
+ 'CFormat or RegisterFormat', LineNo)
+
+ return (List[0], RegisterFormatGuid)
+
+## GetPackage
+#
+# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
+# @param ContainerFile:  The file which describes the package,
+# used for error report
+#
+def GetPackage(Item, ContainerFile, FileRelativePath, LineNo= -1):
+ ItemNew = Item + DataType.TAB_VALUE_SPLIT
+ List = GetSplitValueList(ItemNew)
+ CheckFileType(List[0], '.Dec', ContainerFile, 'package', List[0], LineNo)
+ CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Packages', \
+ List[0], LineNo)
+ if List[1] != '':
+ CheckPcdTokenInfo(List[1], 'Packages', ContainerFile, LineNo)
+
+ return (List[0], List[1])
+
+## Get Pcd Values of Inf
+#
+# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
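+# For example: 'gSampleTokenSpaceGuid.PcdSample|0x1' with Type
+# DataType.TAB_PCDS_FIXED_AT_BUILD ->
+# ('gSampleTokenSpaceGuid', 'PcdSample', '0x1', DataType.TAB_INF_FIXED_PCD)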
+#
+# @param Item: The string describes pcd
+# @param Type: The type of Pcd
+# @param File: The file which describes the pcd, used for error report
+#
+def GetPcdOfInf(Item, Type, File, LineNo):
+ Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
+ TokenGuid, TokenName, Value, InfType = '', '', '', ''
+
+ if Type == DataType.TAB_PCDS_FIXED_AT_BUILD:
+ InfType = DataType.TAB_INF_FIXED_PCD
+ elif Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
+ InfType = DataType.TAB_INF_PATCH_PCD
+ elif Type == DataType.TAB_PCDS_FEATURE_FLAG:
+ InfType = DataType.TAB_INF_FEATURE_PCD
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX:
+ InfType = DataType.TAB_INF_PCD_EX
+ elif Type == DataType.TAB_PCDS_DYNAMIC:
+ InfType = DataType.TAB_INF_PCD
+ List = GetSplitValueList(Item, DataType.TAB_VALUE_SPLIT, 1)
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ if len(TokenInfo) != 2:
+ RaiseParserError(Item, InfType, File, Format, LineNo)
+ else:
+ TokenGuid = TokenInfo[0]
+ TokenName = TokenInfo[1]
+
+ if len(List) > 1:
+ Value = List[1]
+ else:
+ Value = None
+ return (TokenGuid, TokenName, Value, InfType)
+
+
+## Get Pcd Values of Dec
+#
+# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
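+# For example: 'gSampleTokenSpaceGuid.PcdSample|0x1|UINT8|0x00000001' ->
+# ('gSampleTokenSpaceGuid', 'PcdSample', '0x1', 'UINT8', '0x00000001', Type)
+#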
+# @param Item: Pcd item
+# @param Type: Pcd type
+# @param File: Dec file
+# @param LineNo: Line number
+#
+def GetPcdOfDec(Item, Type, File, LineNo= -1):
+ Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
+ TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
+ List = GetSplitValueList(Item)
+ if len(List) != 4:
+ RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
+ else:
+ Value = List[1]
+ DatumType = List[2]
+ Token = List[3]
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ if len(TokenInfo) != 2:
+ RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
+ else:
+ TokenGuid = TokenInfo[0]
+ TokenName = TokenInfo[1]
+
+ return (TokenGuid, TokenName, Value, DatumType, Token, Type)
+
+## Parse DEFINE statement
+#
+# Get DEFINE macros
+#
+# @param LineValue: A DEFINE line value
+# @param StartLine: A DEFINE start line
+# @param Table: A table
+# @param FileID: File ID
+# @param Filename: File name
+# @param SectionName: DEFINE section name
+# @param SectionModel: DEFINE section model
+# @param Arch: DEFINE arch
+#
+def ParseDefine(LineValue, StartLine, Table, FileID, SectionName, \
+ SectionModel, Arch):
+ Logger.Debug(Logger.DEBUG_2, ST.MSG_DEFINE_STATEMENT_FOUND % (LineValue, \
+ SectionName))
+ Define = \
+ GetSplitValueList(CleanString\
+ (LineValue[LineValue.upper().\
+ find(DataType.TAB_DEFINE.upper() + ' ') + \
+ len(DataType.TAB_DEFINE + ' ') : ]), \
+ DataType.TAB_EQUAL_SPLIT, 1)
+ Table.Insert(DataType.MODEL_META_DATA_DEFINE, Define[0], Define[1], '', \
+ '', '', Arch, SectionModel, FileID, StartLine, -1, \
+ StartLine, -1, 0)
+
+## InsertSectionItems
+#
+# Insert item data of a section to a dict
+#
+# @param Model: A model
+# @param CurrentSection: Current section
+# @param SectionItemList: Section item list
+# @param ArchList: Arch list
+# @param ThirdList: Third list
+# @param RecordSet: Record set
+#
+def InsertSectionItems(Model, SectionItemList, ArchList, \
+ ThirdList, RecordSet):
+ #
+ # Insert each item data of a section
+ #
+ for Index in range(0, len(ArchList)):
+ Arch = ArchList[Index]
+ Third = ThirdList[Index]
+ if Arch == '':
+ Arch = DataType.TAB_ARCH_COMMON
+
+ Records = RecordSet[Model]
+ for SectionItem in SectionItemList:
+ LineValue, StartLine, Comment = SectionItem[0], \
+ SectionItem[1], SectionItem[2]
+
+ Logger.Debug(4, ST.MSG_PARSING % LineValue)
+ #
+ # And then parse DEFINE statement
+            # Skip DEFINE statements here; they are parsed separately
+ if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
+ continue
+ #
+ # At last parse other sections
+ #
+ IdNum = -1
+ Records.append([LineValue, Arch, StartLine, IdNum, Third, Comment])
+
+ if RecordSet != {}:
+ RecordSet[Model] = Records
+
+## GenMetaDatSectionItem
+#
+# @param Key: A key
+# @param Value: A value
+# @param List:  A dict that maps each key to its list of values
+#
+def GenMetaDatSectionItem(Key, Value, List):
+ if Key not in List:
+ List[Key] = [Value]
+ else:
+ List[Key].append(Value)
+
+## GetPkgInfoFromDec
+#
+# get package name, guid, version info from dec files
+#
+# @param Path: File path
+#
+def GetPkgInfoFromDec(Path):
+ PkgName = None
+ PkgGuid = None
+ PkgVersion = None
+
+ Path = Path.replace('\\', '/')
+
+ if not os.path.exists(Path):
+ Logger.Error("\nUPT", FILE_NOT_FOUND, File=Path)
+
+ if Path in gPKG_INFO_DICT:
+ return gPKG_INFO_DICT[Path]
+
+ try:
+ DecParser = None
+ if Path not in GlobalData.gPackageDict:
+ DecParser = Dec(Path)
+ GlobalData.gPackageDict[Path] = DecParser
+ else:
+ DecParser = GlobalData.gPackageDict[Path]
+
+ PkgName = DecParser.GetPackageName()
+ PkgGuid = DecParser.GetPackageGuid()
+ PkgVersion = DecParser.GetPackageVersion()
+ gPKG_INFO_DICT[Path] = (PkgName, PkgGuid, PkgVersion)
+ return PkgName, PkgGuid, PkgVersion
+ except FatalError:
+ return None, None, None
+
+
+## GetWorkspacePackage
+#
+# Get a list of workspace package information.
+#
+def GetWorkspacePackage():
+ DecFileList = []
+ WorkspaceDir = GlobalData.gWORKSPACE
+ PackageDir = GlobalData.gPACKAGE_PATH
+ for PkgRoot in [WorkspaceDir] + PackageDir:
+ for Root, Dirs, Files in os.walk(PkgRoot):
+ if 'CVS' in Dirs:
+ Dirs.remove('CVS')
+ if '.svn' in Dirs:
+ Dirs.remove('.svn')
+            #
+            # Iterate over a copy: removing entries from Dirs while iterating
+            # it would skip the directory after each removed one
+            #
+            for Dir in Dirs[:]:
+                if Dir.startswith('.'):
+                    Dirs.remove(Dir)
+ for FileSp in Files:
+ if FileSp.startswith('.'):
+ continue
+ Ext = os.path.splitext(FileSp)[1]
+ if Ext.lower() in ['.dec']:
+ DecFileList.append\
+ (os.path.normpath(os.path.join(Root, FileSp)))
+ #
+    # extract package name, guid, version info from the DEC file list
+ #
+ PkgList = []
+ for DecFile in DecFileList:
+ (PkgName, PkgGuid, PkgVersion) = GetPkgInfoFromDec(DecFile)
+ if PkgName and PkgGuid and PkgVersion:
+ PkgList.append((PkgName, PkgGuid, PkgVersion, DecFile))
+
+ return PkgList
+
+## GetWorkspaceModule
+#
+# Get a list of workspace modules.
+#
+def GetWorkspaceModule():
+ InfFileList = []
+ WorkspaceDir = GlobalData.gWORKSPACE
+ for Root, Dirs, Files in os.walk(WorkspaceDir):
+ if 'CVS' in Dirs:
+ Dirs.remove('CVS')
+ if '.svn' in Dirs:
+ Dirs.remove('.svn')
+ if 'Build' in Dirs:
+ Dirs.remove('Build')
+        #
+        # Iterate over a copy: removing entries from Dirs while iterating
+        # it would skip the directory after each removed one
+        #
+        for Dir in Dirs[:]:
+            if Dir.startswith('.'):
+                Dirs.remove(Dir)
+ for FileSp in Files:
+ if FileSp.startswith('.'):
+ continue
+ Ext = os.path.splitext(FileSp)[1]
+ if Ext.lower() in ['.inf']:
+ InfFileList.append\
+ (os.path.normpath(os.path.join(Root, FileSp)))
+
+ return InfFileList
+
+## MacroParser used to parse macro definition
+#
+# @param Line: The content contain linestring and line number
+# @param FileName: The meta-file file name
+# @param SectionType: Section for the Line belong to
+# @param FileLocalMacros: A dict of macros defined in the [Defines] section.
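+#
+# For example: Line = ('DEFINE IMAGE_NAME = Setup', 10) -> ('IMAGE_NAME', 'Setup')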
+#
+def MacroParser(Line, FileName, SectionType, FileLocalMacros):
+ MacroDefPattern = re.compile("^(DEFINE)[ \t]+")
+ LineContent = Line[0]
+ LineNo = Line[1]
+ Match = MacroDefPattern.match(LineContent)
+ if not Match:
+ #
+        # Not a 'DEFINE' statement, nothing to parse here
+ #
+ return None, None
+
+ TokenList = GetSplitValueList(LineContent[Match.end(1):], \
+ DataType.TAB_EQUAL_SPLIT, 1)
+ #
+ # Syntax check
+ #
+ if not TokenList[0]:
+ Logger.Error('Parser', FORMAT_INVALID, ST.ERR_MACRONAME_NOGIVEN,
+ ExtraData=LineContent, File=FileName, Line=LineNo)
+ if len(TokenList) < 2:
+ Logger.Error('Parser', FORMAT_INVALID, ST.ERR_MACROVALUE_NOGIVEN,
+ ExtraData=LineContent, File=FileName, Line=LineNo)
+
+ Name, Value = TokenList
+
+ #
+ # DEFINE defined macros
+ #
+ if SectionType == DataType.MODEL_META_DATA_HEADER:
+ FileLocalMacros[Name] = Value
+
+ ReIsValidMacroName = re.compile(r"^[A-Z][A-Z0-9_]*$", re.DOTALL)
+ if ReIsValidMacroName.match(Name) is None:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_MACRONAME_INVALID % (Name),
+ ExtraData=LineContent,
+ File=FileName,
+ Line=LineNo)
+
+ # Validate MACRO Value
+ #
+ # <MacroDefinition> ::= [<Comments>]{0,}
+ # "DEFINE" <MACRO> "=" [{<PATH>} {<VALUE>}] <EOL>
+ # <Value> ::= {<NumVal>} {<Boolean>} {<AsciiString>} {<GUID>}
+ # {<CString>} {<UnicodeString>} {<CArray>}
+ #
+ # The definition of <NumVal>, <PATH>, <Boolean>, <GUID>, <CString>,
+ # <UnicodeString>, <CArray> are subset of <AsciiString>.
+ #
+ ReIsValidMacroValue = re.compile(r"^[\x20-\x7e]*$", re.DOTALL)
+ if ReIsValidMacroValue.match(Value) is None:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_MACROVALUE_INVALID % (Value),
+ ExtraData=LineContent,
+ File=FileName,
+ Line=LineNo)
+
+ return Name, Value
+
+## GenSection
+#
+# generate section contents
+#
+# @param SectionName: indicates the name of the section; for details refer
+#                     to the INF and DEC specs
+# @param SectionDict: section statement dict; the key is the SectionAttrs
+#                     string (arch, plus module type or platform when
+#                     needed) separated by space, and the value is the list
+#                     of statements
+#
+def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
+ Content = ''
+ for SectionAttrs in SectionDict:
+ StatementList = SectionDict[SectionAttrs]
+ if SectionAttrs and SectionName != 'Defines' and SectionAttrs.strip().upper() != DataType.TAB_ARCH_COMMON:
+ if SplitArch:
+ ArchList = GetSplitValueList(SectionAttrs, DataType.TAB_SPACE_SPLIT)
+ else:
+ if SectionName != 'UserExtensions':
+ ArchList = GetSplitValueList(SectionAttrs, DataType.TAB_COMMENT_SPLIT)
+ else:
+ ArchList = [SectionAttrs]
+ for Index in range(0, len(ArchList)):
+ ArchList[Index] = ConvertArchForInstall(ArchList[Index])
+ Section = '[' + SectionName + '.' + (', ' + SectionName + '.').join(ArchList) + ']'
+ else:
+ Section = '[' + SectionName + ']'
+ Content += '\n' + Section + '\n'
+ if StatementList is not None:
+ for Statement in StatementList:
+ LineList = Statement.split('\n')
+ NewStatement = ""
+ for Line in LineList:
+ # ignore blank comment
+ if not Line.replace("#", '').strip() and SectionName not in ('Defines', 'Hob', 'Event', 'BootMode'):
+ continue
+                    # indent each line with two spaces, except comments in the Defines section
+ if Line.strip().startswith('#') and SectionName == 'Defines':
+ NewStatement += "%s\n" % Line
+ continue
+ NewStatement += " %s\n" % Line
+ if NeedBlankLine:
+ Content += NewStatement + '\n'
+ else:
+ Content += NewStatement
+
+ if NeedBlankLine:
+ Content = Content[:-1]
+ if not Content.replace('\\n', '').strip():
+ return ''
+ return Content
+
+## ConvertArchForInstall
+# if Arch.upper() is in "IA32", "X64", "IPF", and "EBC", it must be upper case. "common" must be lower case.
+# Anything else, the case must be preserved
+#
+# @param Arch: the arch string that need to be converted, it should be stripped before pass in
+# @return: the arch string that get converted
+#
+def ConvertArchForInstall(Arch):
+ if Arch.upper() in [DataType.TAB_ARCH_IA32, DataType.TAB_ARCH_X64,
+ DataType.TAB_ARCH_IPF, DataType.TAB_ARCH_EBC]:
+ Arch = Arch.upper()
+ elif Arch.upper() == DataType.TAB_ARCH_COMMON:
+ Arch = Arch.lower()
+
+ return Arch
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/StringUtils.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/StringUtils.py
new file mode 100755
index 00000000..b3c742c2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/StringUtils.py
@@ -0,0 +1,982 @@
+## @file
+# This file is used to define common string related functions used in parsing
+# process
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+StringUtils
+'''
+##
+# Import Modules
+#
+import re
+import os.path
+import Logger.Log as Logger
+import Library.DataType as DataType
+from Logger.ToolError import FORMAT_INVALID
+from Logger.ToolError import PARSER_ERROR
+from Logger import StringTable as ST
+
+#
+# Regular expression for matching macro used in DSC/DEC/INF file inclusion
+#
+gMACRO_PATTERN = re.compile(r"\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
+
+## GetSplitValueList
+#
+# Get a value list from a string with multiple values split with SplitTag
+# The default SplitTag is DataType.TAB_VALUE_SPLIT
+# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
+#
+# @param String:    The input string to be split
+# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
+# @param MaxSplit: The max number of split values, default is -1
+#
+#
+def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
+ return list(map(lambda l: l.strip(), String.split(SplitTag, MaxSplit)))
+
+## MergeArches
+#
+# Find all arches of a key in the dict and append the new arch to its list;
+# if the key does not exist yet, set the arch directly
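+# For example: MergeArches({}, 'BaseLib', 'IA32') gives {'BaseLib': ['IA32']};
+# a second call with 'X64' gives {'BaseLib': ['IA32', 'X64']}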
+#
+# @param Dict: The input value for Dict
+# @param Key: The input value for Key
+# @param Arch: The Arch to be added or merged
+#
+def MergeArches(Dict, Key, Arch):
+    if Key in Dict:
+ Dict[Key].append(Arch)
+ else:
+ Dict[Key] = Arch.split()
+
+## GenDefines
+#
+# Parse a string with format "DEFINE <VarName> = <PATH>"
+# Generate a map Defines[VarName] = PATH
+# Return 0 if a DEFINE is parsed, -1 if its format is invalid, and 1 if the
+# string contains no DEFINE statement
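+# For example: GenDefines('DEFINE MDEPKG = MdePkg', 'COMMON', Defines)
+# sets Defines[('MDEPKG', 'COMMON')] = 'MdePkg' and returns 0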
+#
+# @param String: String with DEFINE statement
+# @param Arch: Supported Arch
+# @param Defines: DEFINE statement to be parsed
+#
+def GenDefines(String, Arch, Defines):
+ if String.find(DataType.TAB_DEFINE + ' ') > -1:
+ List = String.replace(DataType.TAB_DEFINE + ' ', '').\
+ split(DataType.TAB_EQUAL_SPLIT)
+ if len(List) == 2:
+ Defines[(CleanString(List[0]), Arch)] = CleanString(List[1])
+ return 0
+ else:
+ return -1
+ return 1
+
+## GetLibraryClassesWithModuleType
+#
+# Get Library Class definition when no module type defined
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
+ NewKey = SplitModuleType(Key)
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ LineList = Lines.splitlines()
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line != '' and Line[0] != CommentCharacter:
+ KeyValues.append([CleanString(Line, CommentCharacter), NewKey[1]])
+
+ return True
+
+## GetDynamics
+#
+# Get Dynamic Pcds
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
+ #
+ # Get SkuId Name List
+ #
+ SkuIdNameList = SplitModuleType(Key)
+
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ LineList = Lines.splitlines()
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line != '' and Line[0] != CommentCharacter:
+ KeyValues.append([CleanString(Line, CommentCharacter), SkuIdNameList[1]])
+
+ return True
+
+## SplitModuleType
+#
+# Split ModuleType out of section defien to get key
+# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [
+# 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
+#
+# @param Key: String to be parsed
+#
+def SplitModuleType(Key):
+ KeyList = Key.split(DataType.TAB_SPLIT)
+ #
+ # Fill in for arch
+ #
+ KeyList.append('')
+ #
+ # Fill in for moduletype
+ #
+ KeyList.append('')
+ ReturnValue = []
+ KeyValue = KeyList[0]
+ if KeyList[1] != '':
+ KeyValue = KeyValue + DataType.TAB_SPLIT + KeyList[1]
+ ReturnValue.append(KeyValue)
+ ReturnValue.append(GetSplitValueList(KeyList[2]))
+
+ return ReturnValue
+
+## Replace macro in string
+#
+# This method replaces macros used in the given string. The macros are given
+# in a dictionary.
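+# For example: ReplaceMacro('$(WORKSPACE)/MdePkg', {'WORKSPACE': '/edk2'}) ->
+# '/edk2/MdePkg'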
+#
+# @param String String to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement    Whether to replace an undefined macro with ''
+# @param Line: The tuple containing the line string and the line number
+# @param FileName: The meta-file file name
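+# @param Flag: When True, macros inside quoted substrings are left unreplaced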
+#
+def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None, FileName=None, Flag=False):
+ LastString = String
+ if MacroDefinitions is None:
+ MacroDefinitions = {}
+ while MacroDefinitions:
+ QuotedStringList = []
+ HaveQuotedMacroFlag = False
+ if not Flag:
+ MacroUsed = gMACRO_PATTERN.findall(String)
+ else:
+ ReQuotedString = re.compile('\"')
+ QuotedStringList = ReQuotedString.split(String)
+ if len(QuotedStringList) >= 3:
+ HaveQuotedMacroFlag = True
+ Count = 0
+ MacroString = ""
+ for QuotedStringItem in QuotedStringList:
+ Count += 1
+ if Count % 2 != 0:
+ MacroString += QuotedStringItem
+
+ if Count == len(QuotedStringList) and Count % 2 == 0:
+ MacroString += QuotedStringItem
+
+ MacroUsed = gMACRO_PATTERN.findall(MacroString)
+ #
+ # no macro found in String, stop replacing
+ #
+ if len(MacroUsed) == 0:
+ break
+ for Macro in MacroUsed:
+ if Macro not in MacroDefinitions:
+ if SelfReplacement:
+ String = String.replace("$(%s)" % Macro, '')
+ Logger.Debug(5, "Delete undefined MACROs in file %s line %d: %s!" % (FileName, Line[1], Line[0]))
+ continue
+ if not HaveQuotedMacroFlag:
+ String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
+ else:
+ Count = 0
+ for QuotedStringItem in QuotedStringList:
+ Count += 1
+ if Count % 2 != 0:
+ QuotedStringList[Count - 1] = QuotedStringList[Count - 1].replace("$(%s)" % Macro,
+ MacroDefinitions[Macro])
+ elif Count == len(QuotedStringList) and Count % 2 == 0:
+ QuotedStringList[Count - 1] = QuotedStringList[Count - 1].replace("$(%s)" % Macro,
+ MacroDefinitions[Macro])
+
+ RetString = ''
+ if HaveQuotedMacroFlag:
+ Count = 0
+ for QuotedStringItem in QuotedStringList:
+ Count += 1
+ if Count != len(QuotedStringList):
+ RetString += QuotedStringList[Count - 1] + "\""
+ else:
+ RetString += QuotedStringList[Count - 1]
+
+ String = RetString
+
+ #
+ # in case there's macro not defined
+ #
+ if String == LastString:
+ break
+ LastString = String
+
+ return String
+
+## NormPath
+#
+# Create a normal path
+# And replace DEFINE in the path
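+# For example: NormPath('Dir/../File.txt') -> 'File.txt'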
+#
+# @param Path: The input value for Path to be converted
+# @param Defines:    A dict of DEFINE statements
+#
+def NormPath(Path, Defines=None):
+ IsRelativePath = False
+ if Defines is None:
+ Defines = {}
+ if Path:
+ if Path[0] == '.':
+ IsRelativePath = True
+ #
+ # Replace with Define
+ #
+ if Defines:
+ Path = ReplaceMacro(Path, Defines)
+ #
+ # To local path format
+ #
+ Path = os.path.normpath(Path)
+
+ if IsRelativePath and Path[0] != '.':
+ Path = os.path.join('.', Path)
+ return Path
+
+## CleanString
+#
+# Remove comments in a string
+# Remove spaces
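+# For example: '  Value  # comment' -> 'Value'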
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content,
+# default is DataType.TAB_COMMENT_SPLIT
+#
+def CleanString(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip()
+ #
+ # Replace EDK1's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(DataType.TAB_COMMENT_EDK1_SPLIT, CommentCharacter)
+ #
+ # remove comments, but we should escape comment character in string
+ #
+ InString = False
+ for Index in range(0, len(Line)):
+ if Line[Index] == '"':
+ InString = not InString
+ elif Line[Index] == CommentCharacter and not InString:
+ Line = Line[0: Index]
+ break
+ #
+ # remove whitespace again
+ #
+ Line = Line.strip()
+
+ return Line
+
+## CleanString2
+#
+# Split comments in a string
+# Remove spaces
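+# For example: 'Statement ## A comment' -> ('Statement', 'A comment')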
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content,
+# default is DataType.TAB_COMMENT_SPLIT
+#
+def CleanString2(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip()
+ #
+ # Replace EDK1's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(DataType.TAB_COMMENT_EDK1_SPLIT, CommentCharacter)
+ #
+ # separate comments and statements
+ #
+ LineParts = Line.split(CommentCharacter, 1)
+ #
+ # remove whitespace again
+ #
+ Line = LineParts[0].strip()
+ if len(LineParts) > 1:
+ Comment = LineParts[1].strip()
+ #
+ # Remove prefixed and trailing comment characters
+ #
+ Start = 0
+ End = len(Comment)
+ while Start < End and Comment.startswith(CommentCharacter, Start, End):
+ Start += 1
+ while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
+ End -= 1
+ Comment = Comment[Start:End]
+ Comment = Comment.strip()
+ else:
+ Comment = ''
+
+ return Line, Comment
+
+## GetMultipleValuesOfKeyFromLines
+#
+# Parse multiple strings to clean comment and spaces
+# The result is saved to KeyValues
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
+ if Key:
+ pass
+ if KeyValues:
+ pass
+ Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
+ LineList = Lines.split('\n')
+ for Line in LineList:
+ Line = CleanString(Line, CommentCharacter)
+ if Line != '' and Line[0] != CommentCharacter:
+ KeyValues += [Line]
+ return True
+
+## GetDefineValue
+#
+# Parse a DEFINE statement to get defined value
+# DEFINE Key Value
+#
+# @param String: The content to be parsed
+# @param Key: The key of DEFINE statement
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+def GetDefineValue(String, Key, CommentCharacter):
+ if CommentCharacter:
+ pass
+ String = CleanString(String)
+ return String[String.find(Key + ' ') + len(Key + ' ') : ]
+
+## GetSingleValueOfKeyFromLines
+#
+# Parse multiple strings as below to get value of each definition line
+# Key1 = Value1
+# Key2 = Value2
+# The result is saved to Dictionary
+#
+# @param Lines: The content to be parsed
+# @param Dictionary: To store data after parsing
+# @param CommentCharacter: Comment char, be used to ignore comment content
+# @param KeySplitCharacter: Key split char, between key name and key value.
+# Key1 = Value1, '=' is the key split char
+# @param ValueSplitFlag: Value split flag, be used to decide if has
+# multiple values
+# @param ValueSplitCharacter: Value split char, be used to split multiple
+# values. Key1 = Value1|Value2, '|' is the value
+# split char
+#
+def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, \
+ ValueSplitFlag, ValueSplitCharacter):
+ Lines = Lines.split('\n')
+ Keys = []
+ Value = ''
+ DefineValues = ['']
+ SpecValues = ['']
+
+ for Line in Lines:
+ #
+ # Handle DEFINE and SPEC
+ #
+ if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
+ if '' in DefineValues:
+ DefineValues.remove('')
+ DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
+ continue
+ if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
+ if '' in SpecValues:
+ SpecValues.remove('')
+ SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
+ continue
+
+ #
+ # Handle Others
+ #
+ LineList = Line.split(KeySplitCharacter, 1)
+ if len(LineList) >= 2:
+ Key = LineList[0].split()
+ if len(Key) == 1 and Key[0][0] != CommentCharacter:
+ #
+ # Remove comments and white spaces
+ #
+ LineList[1] = CleanString(LineList[1], CommentCharacter)
+ if ValueSplitFlag:
+ Value = list(map(lambda x: x.strip(), LineList[1].split(ValueSplitCharacter)))
+ else:
+ Value = CleanString(LineList[1], CommentCharacter).splitlines()
+
+ if Key[0] in Dictionary:
+ if Key[0] not in Keys:
+ Dictionary[Key[0]] = Value
+ Keys.append(Key[0])
+ else:
+ Dictionary[Key[0]].extend(Value)
+ else:
+ Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]
+
+ if DefineValues == []:
+ DefineValues = ['']
+ if SpecValues == []:
+ SpecValues = ['']
+ Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
+ Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues
+
+ return True
+
+## PreCheck
+#
+# Do pre-check for a file before it is parsed
+# Check $()
+# Check []
+#
+# @param FileName: Used for error report
+# @param FileContent: File content to be parsed
+# @param SupSectionTag: Used for error report
+#
+def PreCheck(FileName, FileContent, SupSectionTag):
+ if SupSectionTag:
+ pass
+ LineNo = 0
+ IsFailed = False
+ NewFileContent = ''
+ for Line in FileContent.splitlines():
+ LineNo = LineNo + 1
+ #
+ # Clean current line
+ #
+ Line = CleanString(Line)
+ #
+ # Remove commented line
+ #
+ if Line.find(DataType.TAB_COMMA_SPLIT) == 0:
+ Line = ''
+ #
+ # Check $()
+ #
+ if Line.find('$') > -1:
+ if Line.find('$(') < 0 or Line.find(')') < 0:
+ Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
+ #
+ # Check []
+ #
+ if Line.find('[') > -1 or Line.find(']') > -1:
+ #
+ # Only get one '[' or one ']'
+ #
+ if not (Line.find('[') > -1 and Line.find(']') > -1):
+ Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
+ #
+ # Regenerate FileContent
+ #
+ NewFileContent = NewFileContent + Line + '\r\n'
+
+ if IsFailed:
+ Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
+
+ return NewFileContent
+
+## CheckFileType
+#
+# Check if the Filename has the extension ExtName
+# Return True if it does
+# Raise an error if it does not
+#
+# @param CheckFilename: Name of the file to be checked
+# @param ExtName: Ext name of the file to be checked
+# @param ContainerFilename: The container file which describes the file to be
+# checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file
+# to be checked
+#
+def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
+ if CheckFilename != '' and CheckFilename is not None:
+ (Root, Ext) = os.path.splitext(CheckFilename)
+ if Ext.upper() != ExtName.upper() and Root:
+ ContainerFile = open(ContainerFilename, 'r').read()
+ if LineNo == -1:
+ LineNo = GetLineNo(ContainerFile, Line)
+ ErrorMsg = ST.ERR_SECTIONNAME_INVALID % (SectionName, CheckFilename, ExtName)
+ Logger.Error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo, \
+ File=ContainerFilename, RaiseError=Logger.IS_RAISE_ERROR)
+
+ return True
+
+## CheckFileExist
+#
+# Check if the file exists
+# Return the full path of the file if it exists
+# Raise an error if it does not
+#
+# @param CheckFilename: Name of the file to be checked
+# @param WorkspaceDir: Current workspace dir
+# @param ContainerFilename: The container file which describes the file to
+# be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the
+# file to be checked
+#
+def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
+ CheckFile = ''
+ if CheckFilename != '' and CheckFilename is not None:
+ CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
+ if not os.path.isfile(CheckFile):
+ ContainerFile = open(ContainerFilename, 'r').read()
+ if LineNo == -1:
+ LineNo = GetLineNo(ContainerFile, Line)
+ ErrorMsg = ST.ERR_CHECKFILE_NOTFOUND % (CheckFile, SectionName)
+ Logger.Error("Parser", PARSER_ERROR, ErrorMsg,
+ File=ContainerFilename, Line=LineNo, RaiseError=Logger.IS_RAISE_ERROR)
+ return CheckFile
+
+## GetLineNo
+#
+# Find the index of a line in a file
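+# Return the 1-based line number of the first match, or -1 if not found
+# For example: GetLineNo('A\nB\nC', 'B') -> 2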
+#
+# @param FileContent: Search scope
+# @param Line: Search key
+#
+def GetLineNo(FileContent, Line, IsIgnoreComment=True):
+ LineList = FileContent.splitlines()
+ for Index in range(len(LineList)):
+ if LineList[Index].find(Line) > -1:
+ #
+ # Ignore statement in comment
+ #
+ if IsIgnoreComment:
+ if LineList[Index].strip()[0] == DataType.TAB_COMMENT_SPLIT:
+ continue
+ return Index + 1
+
+ return -1
+
+## RaiseParserError
+#
+# Raise a parser error
+#
+# @param Line: String which has error
+# @param Section: Used for error report
+# @param File: File which has the string
+# @param Format: Correct format
+#
+def RaiseParserError(Line, Section, File, Format='', LineNo= -1):
+ if LineNo == -1:
+ LineNo = GetLineNo(open(os.path.normpath(File), 'r').read(), Line)
+ ErrorMsg = ST.ERR_INVALID_NOTFOUND % (Line, Section)
+ if Format != '':
+ Format = "Correct format is " + Format
+ Logger.Error("Parser", PARSER_ERROR, ErrorMsg, File=File, Line=LineNo, \
+ ExtraData=Format, RaiseError=Logger.IS_RAISE_ERROR)
+
+## WorkspaceFile
+#
+# Return a full path with workspace dir
+#
+# @param WorkspaceDir: Workspace dir
+# @param Filename: Relative file name
+#
+def WorkspaceFile(WorkspaceDir, Filename):
+ return os.path.join(NormPath(WorkspaceDir), NormPath(Filename))
+
+## Split string
+#
+# Remove the leading and trailing '"' of the string
+#
+# @param String: The string to be processed
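+# For example: '"Sample"' -> 'Sample'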
+#
+def SplitString(String):
+ if String.startswith('\"'):
+ String = String[1:]
+ if String.endswith('\"'):
+ String = String[:-1]
+ return String
+
+## Convert To Sql String
+#
+# Replace "'" with "''" in each item of StringList
+#
+# @param StringList: A list for strings to be converted
+#
+def ConvertToSqlString(StringList):
+ return list(map(lambda s: s.replace("'", "''"), StringList))
+
+## Convert To Sql String
+#
+# Replace "'" with "''" in the String
+#
+# @param String: A String to be converted
+#
+def ConvertToSqlString2(String):
+ return String.replace("'", "''")
+
+## GetStringOfList
+#
+# Get String of a List
+#
+# @param List:  string list
+# @param Split: split character
+#
+def GetStringOfList(List, Split=' '):
+ if not isinstance(List, type([])):
+ return List
+ Str = ''
+ for Item in List:
+ Str = Str + Item + Split
+ return Str.strip()
+
+## Get HelpTextList
+#
+# Get HelpTextList from HelpTextClassList
+#
+# @param HelpTextClassList: Help Text Class List
+#
+def GetHelpTextList(HelpTextClassList):
+ List = []
+ if HelpTextClassList:
+ for HelpText in HelpTextClassList:
+ if HelpText.String.endswith('\n'):
+ HelpText.String = HelpText.String[0: len(HelpText.String) - len('\n')]
+ List.extend(HelpText.String.split('\n'))
+ return List
+
+## Get String Array Length
+#
+# Get String Array Length
+#
+# @param String: the source string
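+# For example: 'L"AB"' -> 6, '"AB"' -> 3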
+#
+def StringArrayLength(String):
+ if String.startswith('L"'):
+ return (len(String) - 3 + 1) * 2
+ elif String.startswith('"'):
+ return (len(String) - 2 + 1)
+ else:
+ return len(String.split()) + 1
+
+## RemoveDupOption
+#
+# Remove duplicated options that share the same prefix from an option string
+#
+# @param OptionString: the option string
+# @param Which: the option prefix to deduplicate, for example "/I"
+# @param Against: values that are already present and count as duplicates
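+# For example: RemoveDupOption('/Ia /Ib /Ia') -> '/Ia /Ib ' (the duplicated
+# option is blanked out)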
+#
+def RemoveDupOption(OptionString, Which="/I", Against=None):
+ OptionList = OptionString.split()
+ ValueList = []
+ if Against:
+ ValueList += Against
+ for Index in range(len(OptionList)):
+ Opt = OptionList[Index]
+ if not Opt.startswith(Which):
+ continue
+ if len(Opt) > len(Which):
+ Val = Opt[len(Which):]
+ else:
+ Val = ""
+ if Val in ValueList:
+ OptionList[Index] = ""
+ else:
+ ValueList.append(Val)
+ return " ".join(OptionList)
+
+## Check if the string is HexDigit
+#
+# Return True if the string is a decimal integer with at least one digit,
+# or a valid hex value (starting with 0x, followed by hex digits);
+# False otherwise.
+# @param Str: input string
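+# For example: '10' -> True, '0x1F' -> True, '1F' -> False, '0x' -> False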
+#
+def IsHexDigit(Str):
+ try:
+ int(Str, 10)
+ return True
+ except ValueError:
+ if len(Str) > 2 and Str.upper().startswith('0X'):
+ try:
+ int(Str, 16)
+ return True
+ except ValueError:
+ return False
+ return False
+
+## Check if the string is HexDigit and its value is within the UINT32 limit
+#
+# Return True if the string is a decimal integer with at least one digit,
+# or a valid hex value (starting with 0x, followed by hex digits), and its
+# value fits in a UINT32; False otherwise.
+# @param Str: input string
+#
+def IsHexDigitUINT32(Str):
+ try:
+ Value = int(Str, 10)
+ if (Value <= 0xFFFFFFFF) and (Value >= 0):
+ return True
+ except ValueError:
+ if len(Str) > 2 and Str.upper().startswith('0X'):
+ try:
+ Value = int(Str, 16)
+ if (Value <= 0xFFFFFFFF) and (Value >= 0):
+ return True
+ except ValueError:
+ return False
+ return False
+
+## ConvertSpecialChar
+#
+# The ASCII text files of type INF, DEC, INI are edited by developers,
+# and may contain characters that cannot be directly translated to strings that
+# are conformant with the UDP XML Schema. Any characters in this category
+# (0x00-0x08, TAB [0x09], 0x0B, 0x0C, 0x0E-0x1F, 0x80-0xFF)
+# must be converted to a space character[0x20] as part of the parsing process.
+#
+def ConvertSpecialChar(Lines):
+    RetLines = []
+    ReMatchSpecialChar = re.compile(r"[\x00-\x08]|\x09|\x0b|\x0c|[\x0e-\x1f]|[\x7f-\xff]")
+    for line in Lines:
+        RetLines.append(ReMatchSpecialChar.sub(' ', line))
+
+ return RetLines
+
+## __GetTokenList
+#
+# Assume Str is a valid feature flag expression.
+# Return a list which contains tokens: alpha numeric token and other token
+# Whitespace are not stripped
+#
+def __GetTokenList(Str):
+ InQuote = False
+ Token = ''
+ TokenOP = ''
+ PreChar = ''
+ List = []
+ for Char in Str:
+ if InQuote:
+ Token += Char
+ if Char == '"' and PreChar != '\\':
+ InQuote = not InQuote
+ List.append(Token)
+ Token = ''
+ continue
+ if Char == '"':
+ if Token and Token != 'L':
+ List.append(Token)
+ Token = ''
+ if TokenOP:
+ List.append(TokenOP)
+ TokenOP = ''
+ InQuote = not InQuote
+ Token += Char
+ continue
+
+ if not (Char.isalnum() or Char in '_'):
+ TokenOP += Char
+ if Token:
+ List.append(Token)
+ Token = ''
+ else:
+ Token += Char
+ if TokenOP:
+ List.append(TokenOP)
+ TokenOP = ''
+
+ if PreChar == '\\' and Char == '\\':
+ PreChar = ''
+ else:
+ PreChar = Char
+ if Token:
+ List.append(Token)
+ if TokenOP:
+ List.append(TokenOP)
+ return List
+
+## ConvertNEToNOTEQ
+#
+# Convert NE operator to NOT EQ
+# For example: 1 NE 2 -> 1 NOT EQ 2
+#
+# @param Expr: Feature flag expression to be converted
+#
+def ConvertNEToNOTEQ(Expr):
+ List = __GetTokenList(Expr)
+ for Index in range(len(List)):
+ if List[Index] == 'NE':
+ List[Index] = 'NOT EQ'
+ return ''.join(List)
+
+## ConvertNOTEQToNE
+#
+# Convert NOT EQ operator to NE
+# For example: 1 NOT EQ 2 -> 1 NE 2
+#
+# @param Expr: Feature flag expression to be converted
+#
+def ConvertNOTEQToNE(Expr):
+ List = __GetTokenList(Expr)
+ HasNOT = False
+ RetList = []
+ for Token in List:
+ if HasNOT and Token == 'EQ':
+ # At least, 'NOT' is in the list
+ while not RetList[-1].strip():
+ RetList.pop()
+ RetList[-1] = 'NE'
+ HasNOT = False
+ continue
+ if Token == 'NOT':
+ HasNOT = True
+ elif Token.strip():
+ HasNOT = False
+ RetList.append(Token)
+
+ return ''.join(RetList)
+
+## SplitPcdEntry
+#
+# Split a PCD entry string into TokenCName, PCD value, and FFE.
+# NOTE: PCD Value and FFE can contain "|" in their expressions, and the INF
+# specification has the following rule: when using the characters "|" or "||"
+# in an expression, the expression must be encapsulated in open "(" and close
+# ")" parentheses.
+#
+# @param String    A PCD entry string to be split.
+#
+# @return List [PcdTokenCName, Value, FFE]
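+#
+# For example: 'PcdSample|0x1|TRUE' -> (['PcdSample', '0x1', 'TRUE'], True)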
+#
+def SplitPcdEntry(String):
+ if not String:
+ return ['', '', ''], False
+
+ PcdTokenCName = ''
+ PcdValue = ''
+ PcdFeatureFlagExp = ''
+
+ ValueList = GetSplitValueList(String, "|", 1)
+
+ #
+ # Only contain TokenCName
+ #
+ if len(ValueList) == 1:
+ return [ValueList[0]], True
+
+ NewValueList = []
+
+ if len(ValueList) == 2:
+ PcdTokenCName = ValueList[0]
+
+ InQuote = False
+ InParenthesis = False
+ StrItem = ''
+ for StrCh in ValueList[1]:
+ if StrCh == '"':
+ InQuote = not InQuote
+ elif StrCh == '(' or StrCh == ')':
+ InParenthesis = not InParenthesis
+
+ if StrCh == '|':
+                #
+                # Per the NOTE above, '|' inside quotes or parentheses must
+                # not act as a separator
+                #
+                if not InQuote and not InParenthesis:
+ NewValueList.append(StrItem.strip())
+ StrItem = ' '
+ continue
+
+ StrItem += StrCh
+
+ NewValueList.append(StrItem.strip())
+
+ if len(NewValueList) == 1:
+ PcdValue = NewValueList[0]
+ return [PcdTokenCName, PcdValue], True
+ elif len(NewValueList) == 2:
+ PcdValue = NewValueList[0]
+ PcdFeatureFlagExp = NewValueList[1]
+ return [PcdTokenCName, PcdValue, PcdFeatureFlagExp], True
+ else:
+ return ['', '', ''], False
+
+ return ['', '', ''], False
+
+## Check if two arches matched?
+#
+# @param Arch1
+# @param Arch2
+#
+def IsMatchArch(Arch1, Arch2):
+ if 'COMMON' in Arch1 or 'COMMON' in Arch2:
+ return True
+ try:
+ if isinstance(Arch1, list) and isinstance(Arch2, list):
+ for Item1 in Arch1:
+ for Item2 in Arch2:
+ if Item1 == Item2:
+ return True
+
+ elif isinstance(Arch1, list):
+ return Arch2 in Arch1
+
+ elif isinstance(Arch2, list):
+ return Arch1 in Arch2
+
+ else:
+ if Arch1 == Arch2:
+ return True
+    except Exception:
+ return False
+
+## GetUniFileName
+#
+# Search all files in FilePath to find the FileName with the largest index
+# Return the FileName with index +1 under the FilePath
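+#
+# For example: if Sample.uni and Sample1.uni already exist under FilePath,
+# GetUniFileName(FilePath, 'Sample') returns the path of Sample2.uni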
+#
+def GetUniFileName(FilePath, FileName):
+ Files = []
+ try:
+ Files = os.listdir(FilePath)
+    except Exception:
+ pass
+
+ LargestIndex = -1
+ IndexNotFound = True
+ for File in Files:
+ if File.upper().startswith(FileName.upper()) and File.upper().endswith('.UNI'):
+ Index = File.upper().replace(FileName.upper(), '').replace('.UNI', '')
+ if Index:
+ try:
+ Index = int(Index)
+ except Exception:
+ Index = -1
+ else:
+ IndexNotFound = False
+ Index = 0
+            if Index > LargestIndex:
+                LargestIndex = Index
+
+    if LargestIndex > -1 and not IndexNotFound:
+        return os.path.normpath(os.path.join(FilePath, FileName + str(LargestIndex + 1) + '.uni'))
+ else:
+ return os.path.normpath(os.path.join(FilePath, FileName + '.uni'))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/UniClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/UniClassObject.py
new file mode 100755
index 00000000..1eaa8f7d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/UniClassObject.py
@@ -0,0 +1,1074 @@
+## @file
+# Collect all defined strings in multiple uni files.
+#
+# Copyright (c) 2014 - 2019, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+"""
+Collect all defined strings in multiple uni files
+"""
+from __future__ import print_function
+
+##
+# Import Modules
+#
+import os, codecs, re
+import distutils.util
+from Logger import ToolError
+from Logger import Log as EdkLogger
+from Logger import StringTable as ST
+from Library.StringUtils import GetLineNo
+from Library.Misc import PathClass
+from Library.Misc import GetCharIndexOutStr
+from Library import DataType as DT
+from Library.ParserValidate import CheckUTF16FileHeader
+
+##
+# Static definitions
+#
+UNICODE_WIDE_CHAR = u'\\wide'
+UNICODE_NARROW_CHAR = u'\\narrow'
+UNICODE_NON_BREAKING_CHAR = u'\\nbr'
+UNICODE_UNICODE_CR = '\r'
+UNICODE_UNICODE_LF = '\n'
+
+NARROW_CHAR = u'\uFFF0'
+WIDE_CHAR = u'\uFFF1'
+NON_BREAKING_CHAR = u'\uFFF2'
+CR = u'\u000D'
+LF = u'\u000A'
+NULL = u'\u0000'
+TAB = u'\t'
+BACK_SPLASH = u'\\'
+
+gLANG_CONV_TABLE = {'eng':'en', 'fra':'fr', \
+ 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
+ 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
+ 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
+ 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
+ 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
+ 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
+ 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
+ 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
+ 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
+ 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
+ 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
+ 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
+ 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
+ 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
+ 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
+ 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
+ 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
+ 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
+ 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
+ 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
+ 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
+ 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
+ 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
+ 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
+ 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
+ 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
+ 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
+ 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
+ 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
+ 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
+ 'zho':'zh', 'zul':'zu'}
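+
+# The table maps ISO 639-2 three-letter language codes to their ISO 639-1
+# two-letter equivalents, e.g. gLANG_CONV_TABLE['fra'] == 'fr' and
+# gLANG_CONV_TABLE['zho'] == 'zh'.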
+
+## Convert a python unicode string to a normal string
+#
+# Convert a python unicode string to a normal string
+# UniToStr(u'I am a string') is 'I am a string'
+#
+# @param Uni: The python unicode string
+#
+# @retval: The formatted normal string
+#
+def UniToStr(Uni):
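+    # NOTE: this strips the leading "u'" and trailing "'" from the Python 2
+    # repr() of a unicode string; on Python 3, where repr() has no u prefix,
+    # this slice also removes the first character of the string.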
+ return repr(Uni)[2:-1]
+
+## Convert a unicode string to a Hex list
+#
+# Convert a unicode string to a Hex list
+# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
+#
+# @param Uni: The python unicode string
+#
+# @retval List: The formatted hex list
+#
+def UniToHexList(Uni):
+ List = []
+ for Item in Uni:
+ Temp = '%04X' % ord(Item)
+ List.append('0x' + Temp[2:4])
+ List.append('0x' + Temp[0:2])
+ return List
+
+## Convert special unicode characters
+#
+# Convert special characters to (c), (r) and (tm).
+#
+# @param Uni: The python unicode string
+#
+# @retval NewUni: The converted unicode string
+#
+def ConvertSpecialUnicodes(Uni):
+    NewUni = Uni.replace(u'\u00A9', '(c)')
+    NewUni = NewUni.replace(u'\u00AE', '(r)')
+    NewUni = NewUni.replace(u'\u2122', '(tm)')
+    return NewUni
+
+## GetLanguageCode1766
+#
+# Check the language code read from .UNI file and convert RFC 4646 codes to RFC 1766 codes
+# RFC 1766 language codes supported in compatibility mode
+# RFC 4646 language codes supported in native mode
+#
+# @param LangName: Language codes read from .UNI file
+#
+# @retval LangName: Valid language code in RFC 1766 format or None
+#
+def GetLanguageCode1766(LangName, File=None):
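+    # NOTE: the unconditional return below short-circuits this function, so
+    # the RFC 1766 validation code that follows it is currently unreachable.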
+ return LangName
+
+ length = len(LangName)
+ if length == 2:
+ if LangName.isalpha():
+ for Key in gLANG_CONV_TABLE.keys():
+ if gLANG_CONV_TABLE.get(Key) == LangName.lower():
+ return Key
+ elif length == 3:
+ if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()):
+ return LangName
+ else:
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ "Invalid RFC 1766 language code : %s" % LangName,
+ File)
+ elif length == 5:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ for Key in gLANG_CONV_TABLE.keys():
+ if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
+ return Key
+ elif length >= 6:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ for Key in gLANG_CONV_TABLE.keys():
+ if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
+ return Key
+ if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
+ for Key in gLANG_CONV_TABLE.keys():
+ if Key == LangName[0:3].lower():
+ return Key
+
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ "Invalid RFC 4646 language code : %s" % LangName,
+ File)
+
+## GetLanguageCode
+#
+# Check the language code read from .UNI file and convert RFC 1766 codes to RFC 4646 codes if appropriate
+# RFC 1766 language codes supported in compatibility mode
+# RFC 4646 language codes supported in native mode
+#
+# @param LangName: Language codes read from .UNI file
+#
+# @retval LangName: Valid language code in RFC 4646 format or None
+#
+def GetLanguageCode(LangName, IsCompatibleMode, File):
+ length = len(LangName)
+ if IsCompatibleMode:
+ if length == 3 and LangName.isalpha():
+ TempLangName = gLANG_CONV_TABLE.get(LangName.lower())
+ if TempLangName is not None:
+ return TempLangName
+ return LangName
+ else:
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ "Invalid RFC 1766 language code : %s" % LangName,
+ File)
+ if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
+ return LangName
+ if length == 2:
+ if LangName.isalpha():
+ return LangName
+ elif length == 3:
+ if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None:
+ return LangName
+ elif length == 5:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ elif length >= 6:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
+ return LangName
+
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+ "Invalid RFC 4646 language code : %s" % LangName,
+ File)
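+
+# Usage sketch (illustrative):
+#   GetLanguageCode('eng', True, File)     # -> 'en' (RFC 1766 mapped to RFC 4646)
+#   GetLanguageCode('en-US', False, File)  # -> 'en-US' (already valid RFC 4646)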
+
+## FormatUniEntry
+#
+# Format an entry in the Uni file.
+#
+# @param StrTokenName The string token name.
+# @param TokenValueList A list of (language, value) pairs to be processed.
+# @param ContainerFile ContainerFile.
+#
+# @return formatted entry
+def FormatUniEntry(StrTokenName, TokenValueList, ContainerFile):
+ SubContent = ''
+ PreFormatLength = 40
+ if len(StrTokenName) > PreFormatLength:
+ PreFormatLength = len(StrTokenName) + 1
+ for (Lang, Value) in TokenValueList:
+ if not Value or Lang == DT.TAB_LANGUAGE_EN_X:
+ continue
+ if Lang == '':
+ Lang = DT.TAB_LANGUAGE_EN_US
+ if Lang == 'eng':
+ Lang = DT.TAB_LANGUAGE_EN_US
+ elif len(Lang.split('-')[0]) == 3:
+ Lang = GetLanguageCode(Lang.split('-')[0], True, ContainerFile)
+ else:
+ Lang = GetLanguageCode(Lang, False, ContainerFile)
+ ValueList = Value.split('\n')
+ SubValueContent = ''
+ for SubValue in ValueList:
+ if SubValue.strip():
+ SubValueContent += \
+ ' ' * (PreFormatLength + len('#language en-US ')) + '\"%s\\n\"' % SubValue.strip() + '\r\n'
+ SubValueContent = SubValueContent[(PreFormatLength + len('#language en-US ')):SubValueContent.rfind('\\n')] \
+ + '\"' + '\r\n'
+ SubContent += ' '*PreFormatLength + '#language %-5s ' % Lang + SubValueContent
+ if SubContent:
+ SubContent = StrTokenName + ' '*(PreFormatLength - len(StrTokenName)) + SubContent[PreFormatLength:]
+ return SubContent
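+
+# For example (illustrative), FormatUniEntry('#string STR_EXAMPLE',
+# [('en-US', 'Hello')], File) returns the token name padded to a 40-character
+# column followed by '#language en-US "Hello"'.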
+
+
+## StringDefClassObject
+#
+# A structure for language definition
+#
+class StringDefClassObject(object):
+ def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
+ self.StringName = ''
+ self.StringNameByteList = []
+ self.StringValue = ''
+ self.StringValueByteList = ''
+ self.Token = 0
+ self.Referenced = Referenced
+ self.UseOtherLangDef = UseOtherLangDef
+ self.Length = 0
+
+ if Name is not None:
+ self.StringName = Name
+ self.StringNameByteList = UniToHexList(Name)
+ if Value is not None:
+ self.StringValue = Value
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+ if Token is not None:
+ self.Token = Token
+
+ def __str__(self):
+ return repr(self.StringName) + ' ' + \
+ repr(self.Token) + ' ' + \
+ repr(self.Referenced) + ' ' + \
+ repr(self.StringValue) + ' ' + \
+ repr(self.UseOtherLangDef)
+
+ def UpdateValue(self, Value = None):
+ if Value is not None:
+ if self.StringValue:
+ self.StringValue = self.StringValue + '\r\n' + Value
+ else:
+ self.StringValue = Value
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+
+## UniFileClassObject
+#
+# A structure for .uni file definition
+#
+class UniFileClassObject(object):
+ def __init__(self, FileList = None, IsCompatibleMode = False, IncludePathList = None):
+ self.FileList = FileList
+ self.File = None
+ self.IncFileList = FileList
+ self.UniFileHeader = ''
+ self.Token = 2
+ self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
+ self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
+ self.OrderedStringDict = {} #{ u'LanguageIdentifier' : {StringName:(IndexInList)} }
+ self.OrderedStringListByToken = {} #{ u'LanguageIdentifier' : {Token: StringDefClassObject} }
+ self.IsCompatibleMode = IsCompatibleMode
+ if not IncludePathList:
+ self.IncludePathList = []
+ else:
+ self.IncludePathList = IncludePathList
+ if len(self.FileList) > 0:
+ self.LoadUniFiles(FileList)
+
+ #
+ # Get Language definition
+ #
+ def GetLangDef(self, File, Line):
+ Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
+ if len(Lang) != 3:
+            try:
+                FileIn = codecs.open(File.Path, mode='rb', encoding='utf_8').readlines()
+            except UnicodeError:
+                try:
+                    FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16').readlines()
+                except UnicodeError:
+                    try:
+                        FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16_le').readlines()
+                    except Exception as Xstr:
+                        EdkLogger.Error("Unicode File Parser",
+                                        ToolError.FILE_OPEN_FAILURE,
+                                        "File read failure: %s" % str(Xstr),
+                                        ExtraData=File)
+ LineNo = GetLineNo(FileIn, Line, False)
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.PARSER_ERROR,
+ "Wrong language definition",
+ ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line,
+ File = File, Line = LineNo)
+ else:
+ LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
+ LangPrintName = Lang[2]
+
+ IsLangInDef = False
+ for Item in self.LanguageDef:
+ if Item[0] == LangName:
+ IsLangInDef = True
+ break
+
+ if not IsLangInDef:
+ self.LanguageDef.append([LangName, LangPrintName])
+
+ #
+ # Add language string
+ #
+ self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
+ self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)
+
+ if not IsLangInDef:
+ #
+ # The found STRING tokens will be added into new language string list
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+ if LangName != FirstLangName:
+ for Index in range (2, len (self.OrderedStringList[FirstLangName])):
+ Item = self.OrderedStringList[FirstLangName][Index]
+ if Item.UseOtherLangDef != '':
+ OtherLang = Item.UseOtherLangDef
+ else:
+ OtherLang = FirstLangName
+ self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName,
+ '',
+ Item.Referenced,
+ Item.Token,
+ OtherLang))
+ self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
+ return True
+
+ #
+ # Get String name and value
+ #
+ def GetStringObject(self, Item):
+ Language = ''
+ Value = ''
+
+ Name = Item.split()[1]
+        # Check that the string name uses only upper-case characters
+ if Name != '':
+ MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
+ if MatchString is None or MatchString.end(0) != len(Name):
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FORMAT_INVALID,
+                                'The string token name %s in UNI file %s must use upper case characters.' % (Name, self.File))
+ LanguageList = Item.split(u'#language ')
+ for IndexI in range(len(LanguageList)):
+ if IndexI == 0:
+ continue
+ else:
+ Language = LanguageList[IndexI].split()[0]
+ #.replace(u'\r\n', u'')
+ Value = \
+ LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')]
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ self.AddStringToList(Name, Language, Value)
+
+ #
+ # Get include file list and load them
+ #
+ def GetIncludeFile(self, Item, Dir = None):
+ if Dir:
+ pass
+ FileName = Item[Item.find(u'!include ') + len(u'!include ') :Item.find(u' ', len(u'!include '))][1:-1]
+ self.LoadUniFile(FileName)
+
+ #
+ # Pre-process before parse .uni file
+ #
+ def PreProcess(self, File, IsIncludeFile=False):
+ if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.FILE_NOT_FOUND,
+ ExtraData=File.Path)
+
+ #
+ # Check file header of the Uni file
+ #
+# if not CheckUTF16FileHeader(File.Path):
+# EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+# ExtraData='The file %s is either invalid UTF-16LE or it is missing the BOM.' % File.Path)
+
+        try:
+            FileIn = codecs.open(File.Path, mode='rb', encoding='utf_8').readlines()
+        except UnicodeError:
+            try:
+                FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16').readlines()
+            except UnicodeError:
+                try:
+                    FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16_le').readlines()
+                except Exception:
+                    EdkLogger.Error("Unicode File Parser", ToolError.FILE_OPEN_FAILURE, ExtraData=File.Path)
+
+
+ #
+ # get the file header
+ #
+ Lines = []
+ HeaderStart = False
+ HeaderEnd = False
+ if not self.UniFileHeader:
+ FirstGenHeader = True
+ else:
+ FirstGenHeader = False
+ for Line in FileIn:
+ Line = Line.strip()
+ if Line == u'':
+ continue
+ if Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and (Line.find(DT.TAB_HEADER_COMMENT) > -1) \
+ and not HeaderEnd and not HeaderStart:
+ HeaderStart = True
+ if not Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and HeaderStart and not HeaderEnd:
+ HeaderEnd = True
+ if Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and HeaderStart and not HeaderEnd and FirstGenHeader:
+ self.UniFileHeader += Line + '\r\n'
+ continue
+
+ #
+ # Use unique identifier
+ #
+ FindFlag = -1
+ LineCount = 0
+ MultiLineFeedExits = False
+ #
+ # 0: initial value
+        # 1: a single String entry exists
+        # 2: a line feed exists under some single String entry
+ #
+ StringEntryExistsFlag = 0
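+        #
+        # Note: the loop below re-reads FileIn[LineCount] rather than relying
+        # on the iteration variable, because the body may merge continuation
+        # lines and shift the remaining entries of FileIn in place.
+        #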
+ for Line in FileIn:
+ Line = FileIn[LineCount]
+ LineCount += 1
+ Line = Line.strip()
+ #
+ # Ignore comment line and empty line
+ #
+ if Line == u'' or Line.startswith(u'//'):
+ #
+ # Change the single line String entry flag status
+ #
+ if StringEntryExistsFlag == 1:
+ StringEntryExistsFlag = 2
+ #
+ # If the '#string' line and the '#language' line are not in the same line,
+ # there should be only one line feed character between them
+ #
+ if MultiLineFeedExits:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+ continue
+
+ MultiLineFeedExits = False
+ #
+ # Process comment embedded in string define lines
+ #
+ FindFlag = Line.find(u'//')
+ if FindFlag != -1 and Line.find(u'//') < Line.find(u'"'):
+ Line = Line.replace(Line[FindFlag:], u' ')
+ if FileIn[LineCount].strip().startswith('#language'):
+ Line = Line + FileIn[LineCount]
+ FileIn[LineCount-1] = Line
+ FileIn[LineCount] = '\r\n'
+ LineCount -= 1
+ for Index in range (LineCount + 1, len (FileIn) - 1):
+ if (Index == len(FileIn) -1):
+ FileIn[Index] = '\r\n'
+ else:
+ FileIn[Index] = FileIn[Index + 1]
+ continue
+ CommIndex = GetCharIndexOutStr(u'/', Line)
+ if CommIndex > -1:
+ if (len(Line) - 1) > CommIndex:
+ if Line[CommIndex+1] == u'/':
+ Line = Line[:CommIndex].strip()
+ else:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+ else:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
+ Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
+ Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
+
+ Line = Line.replace(u'\\\\', u'\u0006')
+ Line = Line.replace(u'\\r\\n', CR + LF)
+ Line = Line.replace(u'\\n', CR + LF)
+ Line = Line.replace(u'\\r', CR)
+ Line = Line.replace(u'\\t', u'\t')
+ Line = Line.replace(u'''\"''', u'''"''')
+ Line = Line.replace(u'\t', u' ')
+ Line = Line.replace(u'\u0006', u'\\')
+
+ #
+ # Check if single line has correct '"'
+ #
+ if Line.startswith(u'#string') and Line.find(u'#language') > -1 and Line.find('"') > Line.find(u'#language'):
+ if not Line.endswith('"'):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+                                ExtraData='''Line %s is missing a closing '"' in file %s'''
+                                % (LineCount, File.Path))
+
+ #
+            # No line feed is allowed between the Name entry and the Language entry
+ #
+ if Line.startswith(u'#string') and Line.find(u'#language') == -1:
+ MultiLineFeedExits = True
+
+ if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.find(u'"') < 0:
+ MultiLineFeedExits = True
+
+ #
+            # No line feed is allowed between the Language entry and the String entry
+ #
+ if Line.startswith(u'#language') and len(Line.split()) == 2:
+ MultiLineFeedExits = True
+
+ #
+ # Check the situation that there only has one '"' for the language entry
+ #
+ if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.count(u'"') == 1:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+                                ExtraData='''Line %s is missing a closing '"' in file %s'''
+                                % (LineCount, File.Path))
+
+ #
+ # Check the situation that there has more than 2 '"' for the language entry
+ #
+ if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.replace(u'\\"', '').count(u'"') > 2:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ ExtraData='''The line %s has more than 2 '"' for language entry in file %s'''
+ % (LineCount, File.Path))
+
+ #
+            # No line feed is allowed between two String entries
+ #
+ if Line.startswith(u'"'):
+ if StringEntryExistsFlag == 2:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNIPARSE_LINEFEED_UP_EXIST % Line, ExtraData=File.Path)
+
+ StringEntryExistsFlag = 1
+ if not Line.endswith('"'):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+                                    ExtraData='''Line %s is missing a closing '"' in file %s'''
+                                    % (LineCount, File.Path))
+
+ #
+ # Check the situation that there has more than 2 '"' for the language entry
+ #
+ if Line.strip() and Line.replace(u'\\"', '').count(u'"') > 2:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ ExtraData='''The line %s has more than 2 '"' for language entry in file %s'''
+ % (LineCount, File.Path))
+
+ elif Line.startswith(u'#language'):
+ if StringEntryExistsFlag == 2:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNI_MISS_STRING_ENTRY % Line, ExtraData=File.Path)
+ StringEntryExistsFlag = 0
+ else:
+ StringEntryExistsFlag = 0
+
+ Lines.append(Line)
+
+ #
+ # Convert string def format as below
+ #
+ # #string MY_STRING_1
+ # #language eng
+ # "My first English string line 1"
+ # "My first English string line 2"
+ # #string MY_STRING_1
+ # #language spa
+ # "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ #
+
+ if not IsIncludeFile and not Lines:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_NO_SECTION_EXIST, \
+ ExtraData=File.Path)
+
+ NewLines = []
+ StrName = u''
+ ExistStrNameList = []
+ for Line in Lines:
+ if StrName and not StrName.split()[1].startswith(DT.TAB_STR_TOKENCNAME + DT.TAB_UNDERLINE_SPLIT):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_STRNAME_FORMAT_ERROR % StrName.split()[1], \
+ ExtraData=File.Path)
+
+ if StrName and len(StrName.split()[1].split(DT.TAB_UNDERLINE_SPLIT)) == 4:
+ StringTokenList = StrName.split()[1].split(DT.TAB_UNDERLINE_SPLIT)
+ if (StringTokenList[3].upper() in [DT.TAB_STR_TOKENPROMPT, DT.TAB_STR_TOKENHELP] and \
+ StringTokenList[3] not in [DT.TAB_STR_TOKENPROMPT, DT.TAB_STR_TOKENHELP]) or \
+ (StringTokenList[2].upper() == DT.TAB_STR_TOKENERR and StringTokenList[2] != DT.TAB_STR_TOKENERR):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR % StrName.split()[1], \
+ ExtraData=File.Path)
+
+ if Line.count(u'#language') > 1:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_SEP_LANGENTRY_LINE % Line, \
+ ExtraData=File.Path)
+
+ if Line.startswith(u'//'):
+ continue
+ elif Line.startswith(u'#langdef'):
+ if len(Line.split()) == 2:
+ NewLines.append(Line)
+ continue
+ elif len(Line.split()) > 2 and Line.find(u'"') > 0:
+ NewLines.append(Line[:Line.find(u'"')].strip())
+ NewLines.append(Line[Line.find(u'"'):])
+ else:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+ elif Line.startswith(u'#string'):
+ if len(Line.split()) == 2:
+ StrName = Line
+ if StrName:
+ if StrName.split()[1] not in ExistStrNameList:
+ ExistStrNameList.append(StrName.split()[1].strip())
+ elif StrName.split()[1] in [DT.TAB_INF_ABSTRACT, DT.TAB_INF_DESCRIPTION, \
+ DT.TAB_INF_BINARY_ABSTRACT, DT.TAB_INF_BINARY_DESCRIPTION, \
+ DT.TAB_DEC_PACKAGE_ABSTRACT, DT.TAB_DEC_PACKAGE_DESCRIPTION, \
+ DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
+ ExtraData=File.Path)
+ continue
+ elif len(Line.split()) == 4 and Line.find(u'#language') > 0:
+ if Line[Line.find(u'#language')-1] != ' ' or \
+ Line[Line.find(u'#language')+len(u'#language')] != u' ':
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ if Line.find(u'"') > 0:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ StrName = Line.split()[0] + u' ' + Line.split()[1]
+ if StrName:
+ if StrName.split()[1] not in ExistStrNameList:
+ ExistStrNameList.append(StrName.split()[1].strip())
+ elif StrName.split()[1] in [DT.TAB_INF_ABSTRACT, DT.TAB_INF_DESCRIPTION, \
+ DT.TAB_INF_BINARY_ABSTRACT, DT.TAB_INF_BINARY_DESCRIPTION, \
+ DT.TAB_DEC_PACKAGE_ABSTRACT, DT.TAB_DEC_PACKAGE_DESCRIPTION, \
+ DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
+ ExtraData=File.Path)
+ if IsIncludeFile:
+ if StrName not in NewLines:
+ NewLines.append((Line[:Line.find(u'#language')]).strip())
+ else:
+ NewLines.append((Line[:Line.find(u'#language')]).strip())
+ NewLines.append((Line[Line.find(u'#language'):]).strip())
+ elif len(Line.split()) > 4 and Line.find(u'#language') > 0 and Line.find(u'"') > 0:
+ if Line[Line.find(u'#language')-1] != u' ' or \
+ Line[Line.find(u'#language')+len(u'#language')] != u' ':
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ if Line[Line.find(u'"')-1] != u' ':
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ StrName = Line.split()[0] + u' ' + Line.split()[1]
+ if StrName:
+ if StrName.split()[1] not in ExistStrNameList:
+ ExistStrNameList.append(StrName.split()[1].strip())
+ elif StrName.split()[1] in [DT.TAB_INF_ABSTRACT, DT.TAB_INF_DESCRIPTION, \
+ DT.TAB_INF_BINARY_ABSTRACT, DT.TAB_INF_BINARY_DESCRIPTION, \
+ DT.TAB_DEC_PACKAGE_ABSTRACT, DT.TAB_DEC_PACKAGE_DESCRIPTION, \
+ DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
+ ExtraData=File.Path)
+ if IsIncludeFile:
+ if StrName not in NewLines:
+ NewLines.append((Line[:Line.find(u'#language')]).strip())
+ else:
+ NewLines.append((Line[:Line.find(u'#language')]).strip())
+ NewLines.append((Line[Line.find(u'#language'):Line.find(u'"')]).strip())
+ NewLines.append((Line[Line.find(u'"'):]).strip())
+ else:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+ elif Line.startswith(u'#language'):
+ if len(Line.split()) == 2:
+ if IsIncludeFile:
+ if StrName not in NewLines:
+ NewLines.append(StrName)
+ else:
+ NewLines.append(StrName)
+ NewLines.append(Line)
+ elif len(Line.split()) > 2 and Line.find(u'"') > 0:
+ if IsIncludeFile:
+ if StrName not in NewLines:
+ NewLines.append(StrName)
+ else:
+ NewLines.append(StrName)
+ NewLines.append((Line[:Line.find(u'"')]).strip())
+ NewLines.append((Line[Line.find(u'"'):]).strip())
+ else:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+ elif Line.startswith(u'"'):
+ if u'#string' in Line or u'#language' in Line:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+ NewLines.append(Line)
+ else:
+ print(Line)
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
+
+ if StrName and not StrName.split()[1].startswith(u'STR_'):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_STRNAME_FORMAT_ERROR % StrName.split()[1], \
+ ExtraData=File.Path)
+
+ if StrName and not NewLines:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNI_MISS_LANGENTRY % StrName, \
+ ExtraData=File.Path)
+
+ #
+ # Check Abstract, Description, BinaryAbstract and BinaryDescription order,
+ # should be Abstract, Description, BinaryAbstract, BinaryDescription
+ AbstractPosition = -1
+ DescriptionPosition = -1
+ BinaryAbstractPosition = -1
+ BinaryDescriptionPosition = -1
+ for StrName in ExistStrNameList:
+ if DT.TAB_HEADER_ABSTRACT.upper() in StrName:
+ if 'BINARY' in StrName:
+ BinaryAbstractPosition = ExistStrNameList.index(StrName)
+ else:
+ AbstractPosition = ExistStrNameList.index(StrName)
+ if DT.TAB_HEADER_DESCRIPTION.upper() in StrName:
+ if 'BINARY' in StrName:
+ BinaryDescriptionPosition = ExistStrNameList.index(StrName)
+ else:
+ DescriptionPosition = ExistStrNameList.index(StrName)
+
+ OrderList = sorted([AbstractPosition, DescriptionPosition])
+ BinaryOrderList = sorted([BinaryAbstractPosition, BinaryDescriptionPosition])
+ Min = OrderList[0]
+ Max = OrderList[1]
+ BinaryMin = BinaryOrderList[0]
+ BinaryMax = BinaryOrderList[1]
+ if BinaryDescriptionPosition > -1:
+ if not(BinaryDescriptionPosition == BinaryMax and BinaryAbstractPosition == BinaryMin and \
+ BinaryMax > Max):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
+ ExtraData=File.Path)
+ elif BinaryAbstractPosition > -1:
+ if not(BinaryAbstractPosition > Max):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
+ ExtraData=File.Path)
+
+ if DescriptionPosition > -1:
+ if not(DescriptionPosition == Max and AbstractPosition == Min and \
+ DescriptionPosition > AbstractPosition):
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
+ Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
+ ExtraData=File.Path)
+
+ if not self.UniFileHeader:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ Message = ST.ERR_NO_SOURCE_HEADER,
+ ExtraData=File.Path)
+
+ return NewLines
+
+ #
+ # Load a .uni file
+ #
+ def LoadUniFile(self, File = None):
+ if File is None:
+ EdkLogger.Error("Unicode File Parser",
+ ToolError.PARSER_ERROR,
+ Message='No unicode file is given',
+ ExtraData=File.Path)
+
+ self.File = File
+
+ #
+ # Process special char in file
+ #
+ Lines = self.PreProcess(File)
+
+ #
+ # Get Unicode Information
+ #
+ for IndexI in range(len(Lines)):
+ Line = Lines[IndexI]
+ if (IndexI + 1) < len(Lines):
+ SecondLine = Lines[IndexI + 1]
+ if (IndexI + 2) < len(Lines):
+ ThirdLine = Lines[IndexI + 2]
+
+ #
+ # Get Language def information
+ #
+ if Line.find(u'#langdef ') >= 0:
+ self.GetLangDef(File, Line + u' ' + SecondLine)
+ continue
+
+ Name = ''
+ Language = ''
+ Value = ''
+ CombineToken = False
+ #
+ # Get string def information format as below
+ #
+ # #string MY_STRING_1
+ # #language eng
+ # "My first English string line 1"
+ # "My first English string line 2"
+ # #string MY_STRING_1
+ # #language spa
+ # "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
+ SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
+ ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
+ if Line.find('"') > 0 or SecondLine.find('"') > 0:
+ EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
+ Message=ST.ERR_UNIPARSE_DBLQUOTE_UNMATCHED,
+ ExtraData=File.Path)
+
+ Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
+ Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
+ for IndexJ in range(IndexI + 2, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0 and \
+ Lines[IndexJ].strip().startswith(u'"') and Lines[IndexJ].strip().endswith(u'"'):
+ if Lines[IndexJ][-2] == ' ':
+ CombineToken = True
+ if CombineToken:
+ if Lines[IndexJ].strip()[1:-1].strip():
+ Value = Value + Lines[IndexJ].strip()[1:-1].rstrip() + ' '
+ else:
+ Value = Value + Lines[IndexJ].strip()[1:-1]
+ CombineToken = False
+ else:
+ Value = Value + Lines[IndexJ].strip()[1:-1] + '\r\n'
+ else:
+ IndexI = IndexJ
+ break
+ if Value.endswith('\r\n'):
+ Value = Value[: Value.rfind('\r\n')]
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ self.AddStringToList(Name, Language, Value)
+ continue
+
+ #
+ # Load multiple .uni files
+ #
+ def LoadUniFiles(self, FileList):
+ if len(FileList) > 0:
+ for File in FileList:
+ FilePath = File.Path.strip()
+ if FilePath.endswith('.uni') or FilePath.endswith('.UNI') or FilePath.endswith('.Uni'):
+ self.LoadUniFile(File)
+
+ #
+ # Add a string to list
+ #
+ def AddStringToList(self, Name, Language, Value, Token = 0, Referenced = False, UseOtherLangDef = '', Index = -1):
+ for LangNameItem in self.LanguageDef:
+ if Language == LangNameItem[0]:
+ break
+
+ if Language not in self.OrderedStringList:
+ self.OrderedStringList[Language] = []
+ self.OrderedStringDict[Language] = {}
+
+ IsAdded = True
+ if Name in self.OrderedStringDict[Language]:
+ IsAdded = False
+ if Value is not None:
+ ItemIndexInList = self.OrderedStringDict[Language][Name]
+ Item = self.OrderedStringList[Language][ItemIndexInList]
+ Item.UpdateValue(Value)
+ Item.UseOtherLangDef = ''
+
+ if IsAdded:
+ Token = len(self.OrderedStringList[Language])
+ if Index == -1:
+ self.OrderedStringList[Language].append(StringDefClassObject(Name,
+ Value,
+ Referenced,
+ Token,
+ UseOtherLangDef))
+ self.OrderedStringDict[Language][Name] = Token
+ for LangName in self.LanguageDef:
+ #
+ # New STRING token will be added into all language string lists.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ if LangName[0] != Language:
+ if UseOtherLangDef != '':
+ OtherLangDef = UseOtherLangDef
+ else:
+ OtherLangDef = Language
+ self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name,
+ '',
+ Referenced,
+ Token,
+ OtherLangDef))
+ self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
+ else:
+ self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name,
+ Value,
+ Referenced,
+ Token,
+ UseOtherLangDef))
+ self.OrderedStringDict[Language][Name] = Index
+
+ #
+ # Set the string as referenced
+ #
+ def SetStringReferenced(self, Name):
+ #
+        # String tokens are added in the same order in all language string lists,
+        # so only the status of the string token in the first language string
+        # list needs to be updated.
+ #
+ Lang = self.LanguageDef[0][0]
+ if Name in self.OrderedStringDict[Lang]:
+ ItemIndexInList = self.OrderedStringDict[Lang][Name]
+ Item = self.OrderedStringList[Lang][ItemIndexInList]
+ Item.Referenced = True
+
+ #
+ # Search the string in language definition by Name
+ #
+ def FindStringValue(self, Name, Lang):
+ if Name in self.OrderedStringDict[Lang]:
+ ItemIndexInList = self.OrderedStringDict[Lang][Name]
+ return self.OrderedStringList[Lang][ItemIndexInList]
+
+ return None
+
+ #
+ # Search the string in language definition by Token
+ #
+ def FindByToken(self, Token, Lang):
+ for Item in self.OrderedStringList[Lang]:
+ if Item.Token == Token:
+ return Item
+
+ return None
+
+ #
+ # Re-order strings and re-generate tokens
+ #
+ def ReToken(self):
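+        # Referenced strings receive the low token values first (in list
+        # order); unreferenced strings then continue from the next free token.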
+ if len(self.LanguageDef) == 0:
+ return None
+ #
+        # Re-token all language strings according to the referenced status of each string token in the first language string list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+
+        # Convert OrderedStringList to OrderedStringListByToken to facilitate future searches by token
+ for LangNameItem in self.LanguageDef:
+ self.OrderedStringListByToken[LangNameItem[0]] = {}
+
+ #
+        # Use small token values for all referenced string tokens.
+ #
+ RefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+            if FirstLangItem.Referenced:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Referenced = True
+ OtherLangItem.Token = RefToken
+ self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
+ RefToken = RefToken + 1
+
+ #
+        # Use big token values for all unreferenced string tokens.
+ #
+ UnRefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+            if not FirstLangItem.Referenced:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Token = RefToken + UnRefToken
+ self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
+ UnRefToken = UnRefToken + 1
+
+ #
+ # Show the instance itself
+ #
+ def ShowMe(self):
+ print(self.LanguageDef)
+ #print self.OrderedStringList
+ for Item in self.OrderedStringList:
+ print(Item)
+ for Member in self.OrderedStringList[Item]:
+ print(str(Member))
+
+ #
+ # Read content from '!include' UNI file
+ #
+    def ReadIncludeUNIfile(self, FilePath):
+        if self.File:
+            pass
+
+        if not os.path.exists(FilePath) or not os.path.isfile(FilePath):
+            EdkLogger.Error("Unicode File Parser",
+                            ToolError.FILE_NOT_FOUND,
+                            ExtraData=FilePath)
+        try:
+            FileIn = codecs.open(FilePath, mode='rb', encoding='utf_8').readlines()
+        except UnicodeError:
+            try:
+                FileIn = codecs.open(FilePath, mode='rb', encoding='utf_16').readlines()
+            except UnicodeError:
+                try:
+                    FileIn = codecs.open(FilePath, mode='rb', encoding='utf_16_le').readlines()
+                except Exception:
+                    EdkLogger.Error("Unicode File Parser", ToolError.FILE_OPEN_FAILURE, ExtraData=FilePath)
+        return FileIn
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
new file mode 100755
index 00000000..8ef315ad
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
@@ -0,0 +1,223 @@
+## @file
+# This is an XML API that uses a syntax similar to XPath, but it is written in
+# standard python so that no extra python packages are required to use it.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+XmlRoutines
+'''
+
+##
+# Import Modules
+#
+import xml.dom.minidom
+import re
+import codecs
+from Logger.ToolError import PARSER_ERROR
+import Logger.Log as Logger
+
+## Create a element of XML
+#
+# @param Name
+# @param String
+# @param NodeList
+# @param AttributeList
+#
+def CreateXmlElement(Name, String, NodeList, AttributeList):
+ Doc = xml.dom.minidom.Document()
+ Element = Doc.createElement(Name)
+ if String != '' and String is not None:
+ Element.appendChild(Doc.createTextNode(String))
+
+ for Item in NodeList:
+ if isinstance(Item, type([])):
+ Key = Item[0]
+ Value = Item[1]
+ if Key != '' and Key is not None and Value != '' and Value is not None:
+ Node = Doc.createElement(Key)
+ Node.appendChild(Doc.createTextNode(Value))
+ Element.appendChild(Node)
+ else:
+ Element.appendChild(Item)
+ for Item in AttributeList:
+ Key = Item[0]
+ Value = Item[1]
+ if Key != '' and Key is not None and Value != '' and Value is not None:
+ Element.setAttribute(Key, Value)
+
+ return Element
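+
+# Usage sketch (illustrative):
+#   Node = CreateXmlElement('Package', '', [['Name', 'MdePkg']],
+#                           [['Version', '1.0']])
+#   Node.toxml()  # -> '<Package Version="1.0"><Name>MdePkg</Name></Package>'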
+
+## Get a list of XML nodes using XPath style syntax.
+#
+# Return a list of XML DOM nodes from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty list is returned.
+#
+# @param Dom The root XML DOM node.
+# @param String An XPath style path.
+#
+def XmlList(Dom, String):
+ if String is None or String == "" or Dom is None or Dom == "":
+ return []
+ if Dom.nodeType == Dom.DOCUMENT_NODE:
+ Dom = Dom.documentElement
+ if String[0] == "/":
+ String = String[1:]
+ TagList = String.split('/')
+ Nodes = [Dom]
+ Index = 0
+ End = len(TagList) - 1
+ while Index <= End:
+ ChildNodes = []
+ for Node in Nodes:
+ if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == \
+ TagList[Index]:
+ if Index < End:
+ ChildNodes.extend(Node.childNodes)
+ else:
+ ChildNodes.append(Node)
+ Nodes = ChildNodes
+ ChildNodes = []
+ Index += 1
+
+ return Nodes
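+
+# Usage sketch (illustrative): for a document rooted at <DistributionPackage>,
+# XmlList(Dom, '/DistributionPackage/PackageSurfaceArea') returns every
+# <PackageSurfaceArea> element directly under the root.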
+
+
+## Get a single XML node using XPath style syntax.
+#
+# Return a single XML DOM node from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then None is returned.
+#
+# @param Dom The root XML DOM node.
+# @param String An XPath style path.
+#
+def XmlNode(Dom, String):
+ if String is None or String == "" or Dom is None or Dom == "":
+ return None
+ if Dom.nodeType == Dom.DOCUMENT_NODE:
+ Dom = Dom.documentElement
+ if String[0] == "/":
+ String = String[1:]
+ TagList = String.split('/')
+ Index = 0
+ End = len(TagList) - 1
+ ChildNodes = [Dom]
+ while Index <= End:
+ for Node in ChildNodes:
+ if Node.nodeType == Node.ELEMENT_NODE and \
+ Node.tagName == TagList[Index]:
+ if Index < End:
+ ChildNodes = Node.childNodes
+ else:
+ return Node
+ break
+ Index += 1
+ return None
+
+
+## Get a single XML element using XPath style syntax.
+#
+# Return a single XML element from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+# @param String An XPath style path.
+#
+def XmlElement(Dom, String):
+ try:
+ return XmlNode(Dom, String).firstChild.data.strip()
+ except BaseException:
+ return ""
+
+## Get a single XML element using XPath style syntax.
+#
+# Similar to XmlElement, but does not strip all the leading and trailing
+# spaces and newlines; it only removes the newline and spaces introduced by
+# toprettyxml()
+#
+# @param Dom The root XML DOM object.
+# @param String An XPath style path.
+#
+def XmlElement2(Dom, String):
+ try:
+ HelpStr = XmlNode(Dom, String).firstChild.data
+ gRemovePrettyRe = re.compile(r"""(?:(\n *) )(.*)\1""", re.DOTALL)
+ HelpStr = re.sub(gRemovePrettyRe, r"\2", HelpStr)
+ return HelpStr
+ except BaseException:
+ return ""
+
+
+## Get a single XML element of the current node.
+#
+# Return a single XML element specified by the current root Dom.
+# If the input Dom is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+#
+def XmlElementData(Dom):
+ try:
+ return Dom.firstChild.data.strip()
+ except BaseException:
+ return ""
+
+
+## Get a list of XML elements using XPath style syntax.
+#
+# Return a list of XML elements from the root Dom specified by XPath String.
+# If the input Dom or String is not valid, then an empty list is returned.
+#
+# @param Dom The root XML DOM object.
+# @param String An XPath style path.
+#
+def XmlElementList(Dom, String):
+ return list(map(XmlElementData, XmlList(Dom, String)))
+
+
+## Get the XML attribute of the current node.
+#
+# Return a single XML attribute named Attribute from the current root Dom.
+# If the input Dom or Attribute is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+# @param Attribute The name of Attribute.
+#
+def XmlAttribute(Dom, Attribute):
+ try:
+ return Dom.getAttribute(Attribute)
+ except BaseException:
+ return ''
+
+
+## Get the XML node name of the current node.
+#
+# Return a single XML node name from the current root Dom.
+# If the input Dom is not valid, then an empty string is returned.
+#
+# @param Dom The root XML DOM object.
+#
+def XmlNodeName(Dom):
+ try:
+ return Dom.nodeName.strip()
+ except BaseException:
+ return ''
+
+## Parse an XML file.
+#
+# Parse the input XML file named FileName and return the XML DOM it stands for.
+# If the input File is not a valid XML file, an error is logged and a fatal
+# exception is raised.
+#
+# @param FileName The XML file name.
+#
+def XmlParseFile(FileName):
+    XmlFile = None
+    try:
+        XmlFile = codecs.open(FileName, 'rb')
+        Dom = xml.dom.minidom.parse(XmlFile)
+        XmlFile.close()
+        return Dom
+    except BaseException as XExcept:
+        if XmlFile is not None:
+            XmlFile.close()
+        Logger.Error('\nUPT', PARSER_ERROR, XExcept, File=FileName, RaiseError=True)
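+
+# Typical usage of this module (illustrative; the file name is hypothetical):
+#   Dom = XmlParseFile('DistPkg.xml')
+#   Version = XmlAttribute(XmlNode(Dom, '/DistributionPackage'), 'Version')
+#   Names = XmlElementList(Dom, '/DistributionPackage/PackageSurfaceArea/Header/Name')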
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/__init__.py
new file mode 100644
index 00000000..a800a619
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/Xml/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Library' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Xml
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/__init__.py
new file mode 100644
index 00000000..fa602a2d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Library/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Library' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Library
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/Log.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/Log.py
new file mode 100755
index 00000000..4f9b0b6c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/Log.py
@@ -0,0 +1,319 @@
+## @file
+# This file implements the log mechanism for Python tools.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Logger
+'''
+
+## Import modules
+from sys import argv
+from sys import stdout
+from sys import stderr
+import os.path
+from os import remove
+from logging import getLogger
+from logging import Formatter
+from logging import StreamHandler
+from logging import FileHandler
+from traceback import extract_stack
+
+from Logger.ToolError import FatalError
+from Logger.ToolError import WARNING_AS_ERROR
+from Logger.ToolError import gERROR_MESSAGE
+from Logger.ToolError import UNKNOWN_ERROR
+from Library import GlobalData
+
+#
+# Log level constants
+#
+DEBUG_0 = 1
+DEBUG_1 = 2
+DEBUG_2 = 3
+DEBUG_3 = 4
+DEBUG_4 = 5
+DEBUG_5 = 6
+DEBUG_6 = 7
+DEBUG_7 = 8
+DEBUG_8 = 9
+DEBUG_9 = 10
+VERBOSE = 15
+INFO = 20
+WARN = 30
+QUIET = 40
+QUIET_1 = 41
+ERROR = 50
+SILENT = 60
+
+IS_RAISE_ERROR = True
+SUPRESS_ERROR = False
+
+#
+# Tool name
+#
+_TOOL_NAME = os.path.basename(argv[0])
+#
+# For validation purpose
+#
+_LOG_LEVELS = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5, DEBUG_6, \
+ DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO, ERROR, QUIET, \
+ QUIET_1, SILENT]
+#
+# For DEBUG level (All DEBUG_0~9 are applicable)
+#
+_DEBUG_LOGGER = getLogger("tool_debug")
+_DEBUG_FORMATTER = Formatter("[%(asctime)s.%(msecs)d]: %(message)s", \
+ datefmt="%H:%M:%S")
+#
+# For VERBOSE, INFO, WARN level
+#
+_INFO_LOGGER = getLogger("tool_info")
+_INFO_FORMATTER = Formatter("%(message)s")
+#
+# For ERROR level
+#
+_ERROR_LOGGER = getLogger("tool_error")
+_ERROR_FORMATTER = Formatter("%(message)s")
+
+#
+# String templates for ERROR/WARN/DEBUG log message
+#
+_ERROR_MESSAGE_TEMPLATE = \
+('\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s')
+
+__ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE = \
+'\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s'
+
+_WARNING_MESSAGE_TEMPLATE = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s'
+_WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE = '%(tool)s: : warning: %(msg)s'
+_DEBUG_MESSAGE_TEMPLATE = '%(file)s(%(line)s): debug: \n %(msg)s'
+
+
+#
+# Log INFO message
+#
+#Info = _INFO_LOGGER.info
+
+def Info(msg, *args, **kwargs):
+ _INFO_LOGGER.info(msg, *args, **kwargs)
+
+#
+# Log information which should be always put out
+#
+def Quiet(msg, *args, **kwargs):
+ _ERROR_LOGGER.error(msg, *args, **kwargs)
+
+## Log debug message
+#
+# @param Level DEBUG level (DEBUG0~9)
+# @param Message Debug information
+# @param ExtraData More information associated with "Message"
+#
+def Debug(Level, Message, ExtraData=None):
+ if _DEBUG_LOGGER.level > Level:
+ return
+ if Level > DEBUG_9:
+ return
+ #
+ # Find out the caller method information
+ #
+ CallerStack = extract_stack()[-2]
+ TemplateDict = {
+ "file" : CallerStack[0],
+ "line" : CallerStack[1],
+ "msg" : Message,
+ }
+
+ if ExtraData is not None:
+ LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict + "\n %s" % ExtraData
+ else:
+ LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict
+
+ _DEBUG_LOGGER.log(Level, LogText)
+
+## Log verbose message
+#
+# @param Message Verbose information
+#
+def Verbose(Message):
+ return _INFO_LOGGER.log(VERBOSE, Message)
+
+## Log warning message
+#
+# Warning messages flag conditions that might be wrong but will not fail the tool.
+#
+# @param ToolName The name of the tool. If not given, the name of caller
+# method will be used.
+# @param Message Warning information
+# @param File The name of file which caused the warning.
+# @param Line The line number in the "File" which caused the warning.
+# @param ExtraData More information associated with "Message"
+#
+def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
+ if _INFO_LOGGER.level > WARN:
+ return
+ #
+ # if no tool name given, use caller's source file name as tool name
+ #
+ if ToolName is None or ToolName == "":
+ ToolName = os.path.basename(extract_stack()[-2][0])
+
+ if Line is None:
+ Line = "..."
+ else:
+ Line = "%d" % Line
+
+ TemplateDict = {
+ "tool" : ToolName,
+ "file" : File,
+ "line" : Line,
+ "msg" : Message,
+ }
+
+ if File is not None:
+ LogText = _WARNING_MESSAGE_TEMPLATE % TemplateDict
+ else:
+ LogText = _WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
+
+ if ExtraData is not None:
+ LogText += "\n %s" % ExtraData
+
+ _INFO_LOGGER.log(WARN, LogText)
+ #
+ # Raise an exception if indicated
+ #
+    if GlobalData.gWARNING_AS_ERROR:
+ raise FatalError(WARNING_AS_ERROR)
+
+## Log ERROR message
+#
+# Once an error message is logged, the tool's execution will be stopped by
+# raising an exception. If you don't want execution to stop, pass
+# "RaiseError" with a "False" value.
+#
+# @param ToolName The name of the tool. If not given, the name of caller
+# method will be used.
+# @param ErrorCode The error code
+# @param Message Error information
+# @param File The name of file which caused the error.
+# @param Line The line number in the "File" which caused the error.
+# @param ExtraData More information associated with "Message"
+# @param RaiseError Raise an exception to break the tool's execution if
+# it's True. This is the default behavior.
+#
+def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
+ ExtraData=None, RaiseError=IS_RAISE_ERROR):
+ if ToolName:
+ pass
+ if Line is None:
+ Line = "..."
+ else:
+ Line = "%d" % Line
+
+ if Message is None:
+ if ErrorCode in gERROR_MESSAGE:
+ Message = gERROR_MESSAGE[ErrorCode]
+ else:
+ Message = gERROR_MESSAGE[UNKNOWN_ERROR]
+
+ if ExtraData is None:
+ ExtraData = ""
+
+ TemplateDict = {
+ "tool" : _TOOL_NAME,
+ "file" : File,
+ "line" : Line,
+ "errorcode" : ErrorCode,
+ "msg" : Message,
+ "extra" : ExtraData
+ }
+
+ if File is not None:
+ LogText = _ERROR_MESSAGE_TEMPLATE % TemplateDict
+ else:
+ LogText = __ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
+
+ if not SUPRESS_ERROR:
+ _ERROR_LOGGER.log(ERROR, LogText)
+ if RaiseError:
+ raise FatalError(ErrorCode)
+
+
+## Initialize log system
+#
+def Initialize():
+ #
+ # Since we use different format to log different levels of message into
+ # different place (stdout or stderr), we have to use different "Logger"
+ # objects to do this.
+ #
+ # For DEBUG level (All DEBUG_0~9 are applicable)
+ _DEBUG_LOGGER.setLevel(INFO)
+ _DebugChannel = StreamHandler(stdout)
+ _DebugChannel.setFormatter(_DEBUG_FORMATTER)
+ _DEBUG_LOGGER.addHandler(_DebugChannel)
+ #
+ # For VERBOSE, INFO, WARN level
+ #
+ _INFO_LOGGER.setLevel(INFO)
+ _InfoChannel = StreamHandler(stdout)
+ _InfoChannel.setFormatter(_INFO_FORMATTER)
+ _INFO_LOGGER.addHandler(_InfoChannel)
+ #
+ # For ERROR level
+ #
+ _ERROR_LOGGER.setLevel(INFO)
+ _ErrorCh = StreamHandler(stderr)
+ _ErrorCh.setFormatter(_ERROR_FORMATTER)
+ _ERROR_LOGGER.addHandler(_ErrorCh)
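+
+# Typical startup sequence (illustrative; the log file name is hypothetical):
+#   import Logger.Log as Logger
+#   Logger.Initialize()
+#   Logger.SetLevel(Logger.VERBOSE)
+#   Logger.SetLogFile('upt.log')   # optional: mirror messages to a file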
+
+
+## Set log level
+#
+# @param Level One of log level in _LogLevel
+#
+def SetLevel(Level):
+ if Level not in _LOG_LEVELS:
+ Info("Not supported log level (%d). Use default level instead." % \
+ Level)
+ Level = INFO
+ _DEBUG_LOGGER.setLevel(Level)
+ _INFO_LOGGER.setLevel(Level)
+ _ERROR_LOGGER.setLevel(Level)
+
+## Get current log level
+#
+def GetLevel():
+ return _INFO_LOGGER.getEffectiveLevel()
+
+## Treat warnings as errors
+#
+def SetWarningAsError():
+ GlobalData.gWARNING_AS_ERROR = True
+
+## Specify a file to store the log messages in addition to the console output
+#
+# @param LogFile The file path used to store the log message
+#
+def SetLogFile(LogFile):
+ if os.path.exists(LogFile):
+ remove(LogFile)
+
+ _Ch = FileHandler(LogFile)
+ _Ch.setFormatter(_DEBUG_FORMATTER)
+ _DEBUG_LOGGER.addHandler(_Ch)
+
+ _Ch = FileHandler(LogFile)
+ _Ch.setFormatter(_INFO_FORMATTER)
+ _INFO_LOGGER.addHandler(_Ch)
+
+ _Ch = FileHandler(LogFile)
+ _Ch.setFormatter(_ERROR_FORMATTER)
+ _ERROR_LOGGER.addHandler(_Ch)
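+    # Each of the three loggers gets its own FileHandler on the same path, so
+    # debug, info and error messages are all appended to LogFile.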
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/StringTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/StringTable.py
new file mode 100644
index 00000000..8cdbb125
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/StringTable.py
@@ -0,0 +1,860 @@
+## @file
+# This file is used to define strings used in the UPT tool
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+"""
+This file contains user visible strings in a format that can be used for
+localization
+"""
+
+import gettext
+
+#
+# string table starts here...
+#
+
+## strings are classified as following types
+# MSG_...: it is a message string
+# ERR_...: it is a error string
+# WRN_...: it is a warning string
+# HLP_...: it is a help string
+#
+
+_ = gettext.gettext
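+
+# The conventional gettext alias: wrapping a literal in _() marks it for
+# translation, e.g. _("No value specified") is looked up in the active message
+# catalog when one is installed, and returned unchanged otherwise.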
+
+MSG_USAGE_STRING = _("\n"
+ "UEFI Packaging Tool (UEFIPT)\n"
+ "%prog [options]"
+ )
+
+##
+# Version and Copyright
+#
+MSG_VERSION_NUMBER = _("1.1")
+MSG_VERSION = _("UEFI Packaging Tool (UEFIPT) - Revision " + \
+ MSG_VERSION_NUMBER)
+MSG_COPYRIGHT = _("Copyright (c) 2011 - 2018 Intel Corporation All Rights Reserved.")
+MSG_VERSION_COPYRIGHT = _("\n %s\n %s" % (MSG_VERSION, MSG_COPYRIGHT))
+MSG_USAGE = _("%s [options]\n%s" % ("UPT", MSG_VERSION_COPYRIGHT))
+MSG_DESCRIPTION = _("The UEFIPT is used to create, " + \
+ "install or remove a UEFI Distribution Package. " + \
+ "If WORKSPACE environment variable is present, " + \
+ "then UPT will install packages to the location specified by WORKSPACE, " + \
+ "otherwise UPT will install packages to the current directory. " + \
+ "Option -n will override this default installation location")
+
+#
+# INF Parser related strings.
+#
+ERR_INF_PARSER_HEADER_FILE = _(
+ "The Header comment section should start with an @file at the top.")
+ERR_INF_PARSER_HEADER_MISSGING = _(
+ "The Header comment is missing. It must be corrected before continuing.")
+ERR_INF_PARSER_UNKNOWN_SECTION = _("An unknown section was found. "
+ "It must be corrected before continuing. ")
+ERR_INF_PARSER_NO_SECTION_ERROR = _("No section was found. "
+ "A section must be included before continuing.")
+ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID = \
+ _("Build Option format incorrect.")
+ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID = _(
+ "The format of binary %s item is incorrect. "
+ "It should contain at least %d elements.")
+ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX = _(
+ "The format of binary %s item is invalid, "
+ "it should contain not more than %d elements.")
+ERR_INF_PARSER_BINARY_ITEM_INVALID_FILETYPE = _(
+ "The Binary FileType is incorrect. It should in %s")
+ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST = _(
+ "The Binary File: %s not exist.")
+ERR_INF_PARSER_BINARY_ITEM_FILENAME_NOT_EXIST = _(
+ "The Binary File Name item not exist")
+ERR_INF_PARSER_BINARY_VER_TYPE = _(
+ "Only this type is allowed: \"%s\".")
+ERR_INF_PARSER_MULTI_DEFINE_SECTION = \
+ _("Multiple define sections found. "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND = \
+ _("More than 1 %s is defined in DEFINES section. "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_DEFINE_NAME_INVALID = \
+ _("Incorrect name format for : %s")
+ERR_INF_PARSER_DEFINE_GUID_INVALID = \
+ _("The format of this GUID is incorrect: %s")
+ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID = _("Incorrect MODULE_TYPE: %s")
+ERR_INF_PARSER_DEFINE_FROMAT_INVALID = _("Incorrect format: %s")
+ERR_INF_PARSER_FILE_NOT_EXIST = _("This file does not exist: %s")
+ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID = \
+ _("The file does not exist or not in sub-directories "
+ "or has an incorrect file name of the directory containing the INF or DEC file: %s. "
+ "It must be corrected before continuing")
+ERR_INF_PARSER_DEFINE_SHADOW_INVALID = \
+ _("The SHADOW keyword is only valid for"
+ " SEC, PEI_CORE and PEIM module types.")
+ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID = \
+ _("The format of the section header is incorrect")
+ERR_INF_PARSER_DEPEX_SECTION_INVALID = \
+ _("A module can't have a Depex section when its module type is %s")
+ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_BASE_LIBRARY_CLASS = \
+ _("A base type library class can't have a Depex section with module type not defined.")
+ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_LIBRARY_CLASS = \
+ _("A library class can't have a Depex section when its supported module type list is not defined.")
+ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_DRIVER = \
+ _("A driver can't have a Depex section when its module type is UEFI_DRIVER.")
+ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED = \
+ _("Cannot determine the module's Depex type. The Depex's module types are conflict")
+ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST = _(
+ "No %s found in INF file, please check it.")
+ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR = \
+ _("The module type of [Depex] section is invalid, not support type of %s")
+ERR_INF_PARSER_DEPEX_SECTION_CONTENT_MISSING = \
+ _("Missing content in: %s")
+ERR_INF_PARSER_DEPEX_SECTION_CONTENT_ERROR = \
+ _("The [Depex] section contains invalid content: %s")
+ERR_INF_PARSER_DEPEX_SECTION_SEC_TYPE_ERROR = \
+ _("The format is incorrect. The section type keyword of the content in the"
+ " [Depex] section is only for 'PEI_DEPEX', 'DXE_DEPEX', 'SMM_DEPEX', "
+ "it does not support type: %s")
+ERR_INF_PARSER_UE_SECTION_USER_ID_ERROR = \
+ _("This format is incorrect. "
+ "The UserID: %s in [UserExtension] section is incorrect.")
+ERR_INF_PARSER_UE_SECTION_ID_STRING_ERROR = \
+ _("This format is incorrect. "
+ "IdString: %s in [UserExtension] section is incorrect.")
+ERR_INF_PARSER_LIBRARY_SECTION_CONTENT_ERROR = \
+ _("The format is incorrect. "
+ "You can only have a Library name and a Feature flag in one line.")
+ERR_INF_PARSER_LIBRARY_SECTION_LIBNAME_MISSING = \
+ _("Format invalid. Please specify a library name.")
+ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR = \
+ _("The format is incorrect. It should be formatted as follows: "
+ "FileName, Family | TagName | ToolCode | FeatureFlagExpr.")
+ERR_INF_PARSER_PCD_SECTION_TYPE_ERROR = \
+ _("The PCD section type is incorrect. The value should be this list: %s")
+ERR_INF_PARSER_PCD_SECTION_CONTENT_ERROR = \
+ _("PcdName format invalid."
+ "Should like following: PcdName | Value | FeatureFlag.")
+ERR_INF_PARSER_PCD_NAME_FORMAT_ERROR = \
+ _("Format invalid."
+ "Should like following: <TokenSpaceGuidCName>.<PcdCName> ")
+ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR = \
+ _("The format is incorrect. "
+ "It should be formatted as follows: CName | FeatureFlag.")
+ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR = \
+ _("The format is incorrect. "
+ "It should be formatted as follows: <TokenSpaceGuidCName>.<PcdCName>")
+ERR_INF_PARSER_PCD_TAIL_COMMENTS_INVALID = \
+ _("The format is incorrect. "
+ "Multiple usage descriptions must be described on subsequent lines.")
+ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR = \
+ _("This section format is incorrect: %s.")
+ERR_INF_PARSER_SECTION_NAME_DUPLICATE = \
+ _("This section has multiple section names, "
+ "only one section name is permitted.")
+ERR_INF_PARSER_SECTION_ARCH_CONFLICT = \
+ _("The 'common' ARCH must not be used with the specified ARCHs.")
+ERR_INF_PARSER_SOURCE_SECTION_TAGNAME_INVALID = \
+ _("This TagName is incorrect: %s. "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_TAGNAME_NOT_PERMITTED = \
+ _("TagName is not permitted: %s. "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_TOOLCODE_NOT_PERMITTED = \
+ _("ToolCode is not permitted: %s. "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_SOURCE_SECTION_FAMILY_INVALID = \
+ _("This family is incorrect: %s. "
+ "It must be corrected before continuing. ")
+ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID = \
+ _("This SectionName is incorrect: %s. "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_PCD_CVAR_GUID = \
+ _("TokenSpaceGuidCName must be valid C variable format.")
+ERR_INF_PARSER_PCD_CVAR_PCDCNAME = \
+ _("PcdCName must be valid C variable format.")
+ERR_INF_PARSER_PCD_VALUE_INVALID = \
+ _("The PCD value is incorrect. It must be corrected before continuing.")
+ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID = \
+ _("Incorrect feature flag expression: %s")
+ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING = \
+ _("The feature flag expression is missing. Please specify a feature flag.")
+ERR_INF_PARSER_INVALID_CNAME = \
+ _("Incorrect CName: %s. You must specify a valid C variable name.")
+ERR_INF_PARSER_CNAME_MISSING = \
+ _("Missing CName. Specify a valid C variable name.")
+ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID = \
+ _("The Define section contains an invalid keyword: \"%s\"."
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_FILE_MISS_DEFINE = \
+ _("The following file listed in the module "
+ "directory is not listed in the INF: %s")
+ERR_INF_PARSER_VERSION_NUMBER_DEPRICATED = \
+ _("VERSION_NUMBER deprecated. "
+ "The INF file %s should be modified to use the VERSION_STRING instead.")
+ERR_INF_PARSER_VER_EXIST_BOTH_NUM_STR = \
+ _("The INF file %s defines both VERSION_NUMBER and VERSION_STRING, "
+ "using VERSION_STRING")
+ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF = _("EDK 1 INF is not supported")
+ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII = _("The EDK 1 style comment is not supported in EDK II modules")
+
+ERR_INF_PARSER_FEATUREPCD_USAGE_INVALID = _("The usage of a FeaturePcd can"
+ " only be \"CONSUMES\".")
+
+ERR_INF_PARSER_DEFINE_ITEM_NO_NAME = _("No name specified")
+ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE = _("No value specified")
+
+ERR_INF_PARSER_MODULETYPE_INVALID = _("Drivers and applications are not allowed to have a MODULE_TYPE of \"BASE\". "
+"Only libraries are permitted to a have a MODULE_TYPE of \"BASE\".")
+ERR_INF_GET_PKG_DEPENDENCY_FAIL = _("Failed to get PackageDependencies information from file %s")
+ERR_INF_NO_PKG_DEPENDENCY_INFO = _("There are no packages defined that use the AsBuilt PCD information.")
+
+#
+# Item duplicate
+#
+ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC = \
+_('"%s" is redefined in its dependent DEC files')
+ERR_INF_PARSER_ITEM_DUPLICATE = _("%s define duplicated! "
+ "It must be corrected before continuing.")
+ERR_INF_PARSER_ITEM_DUPLICATE_COMMON = _("%s define duplicated! Item listed"
+"in an architectural section must not be listed in the common architectural"
+"section.It must be corrected before continuing.")
+ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR = \
+_("%s define duplicated! Each UserExtensions section header must have a "
+ "unique set of UserId, IdString and Arch values. "
+ "It must be corrected before continuing.")
+
+ERR_INF_PARSER_DEFINE_LIB_NAME_INVALID = \
+_("The name 'NULL' for LibraryClass is a reserved word."
+"Please don't use it.")
+
+ERR_GLOBAL_MARCO_INVALID = \
+_("Using global MACRO in INF/DEC is not permitted: %s . "
+"It must be corrected before continuing.")
+
+ERR_MARCO_DEFINITION_MISS_ERROR = \
+_("MACRO expand incorrectly, can not find the MACRO definition. "
+"It must be corrected before continuing.")
+
+#
+# AsBuilt related
+#
+ERR_LIB_CONTATIN_ASBUILD_AND_COMMON = _("A binary INF file should not contain both AsBuilt LIB_INSTANCES information "
+ "and a common library entry.")
+ERR_LIB_INSTANCE_MISS_GUID = _("Could not get FILE_GUID definition from instance INF file.")
+
+ERR_BO_CONTATIN_ASBUILD_AND_COMMON = _("A binary INF file should contain either AsBuilt information "
+ "or a common build option entry, not both.")
+
+ERR_ASBUILD_PCD_SECTION_TYPE = _("The AsBuilt INF file contains a PCD section type that is not permitted: %s.")
+ERR_ASBUILD_PATCHPCD_FORMAT_INVALID = _("The AsBuilt PatchPcd entry must contain 3 elements: PcdName|Value|Offset")
+ERR_ASBUILD_PCDEX_FORMAT_INVALID = _("The AsBuilt PcdEx entry must contain one element: PcdName")
+ERR_ASBUILD_PCD_VALUE_INVALID = \
+ _("The AsBuilt PCD value %s is incorrect or not align with its datum type %s. "
+ "It must be corrected before continuing.")
+ERR_ASBUILD_PCD_TOKENSPACE_GUID_VALUE_MISS = _("Package file value could not be retrieved for %s.")
+ERR_ASBUILD_PCD_DECLARITION_MISS = _("PCD Declaration in DEC files could not be found for: %s.")
+ERR_ASBUILD_PCD_OFFSET_FORMAT_INVALID = _("The PCD offset format is invalid; a decimal number "
+"(0-4294967295) or a UINT32 hex number is allowed: %s.")
+
+#
+# XML parser related strings
+#
+ERR_XML_PARSER_REQUIRED_ITEM_MISSING = \
+ _("The XML section/attribute '%s' is required under %s, it can't be missing or empty")
+ERR_XML_INVALID_VARIABLENAME = \
+ _("The VariableName of the GUID in the XML tree does not conform to the packaging specification. "
+ "Only a Hex Byte Array of UCS-2 format or L\"string\" is allowed): %s %s %s")
+ERR_XML_INVALID_LIB_SUPMODLIST = _("The LIBRARY_CLASS entry %s must have the list appended using the following format: \n"
+"BASE SEC PEI_CORE PEIM DXE_CORE DXE_DRIVER SMM_CORE DXE_SMM_DRIVER DXE_RUNTIME_DRIVER "
+"DXE_SAL_DRIVER UEFI_DRIVER UEFI_APPLICATION USER_DEFINED\n Current is %s.")
+ERR_XML_INVALID_EXTERN_SUPARCHLIST = \
+ _("There is a mismatch of SupArchList %s between the EntryPoint, UnloadImage, Constructor, "
+ "and Destructor elements in the ModuleSurfaceArea.ModuleProperties: SupArchList: %s. ")
+ERR_XML_INVALID_EXTERN_SUPMODLIST = _("The SupModList attribute of the CONSTRUCTOR or DESTRUCTOR element: %s does not "
+"match the Supported Module Types listed after LIBRARY_CLASS = <Keyword> | %s")
+ERR_XML_INVALID_EXTERN_SUPMODLIST_NOT_LIB = _("The module is not a library module. "
+ "The MODULE_TYPE : %s listed in the ModuleSurfaceArea.Header "
+ "must match the SupModList attribute %s")
+ERR_XML_INVALID_BINARY_FILE_TYPE = _("Invalid binary file type %s.")
+
+#
+# Verbosity related strings.
+#
+MSG_DISTRIBUTION_PACKAGE_FILE_EXISTS = _(
+ "The distribution package file %s already exists.\nPress Y to override it."
+ " To exit the application, press any other key.")
+MSG_CHECK_MODULE_EXIST = _(
+ "\nChecking to see if module exists in workspace started ...")
+MSG_CHECK_MODULE_EXIST_FINISH = \
+ _("Checking to see if module exists in workspace ... Done.")
+MSG_CHECK_MODULE_DEPEX_START = _(
+ "\nChecking to see if module depex met by workspace started ...")
+MSG_CHECK_MODULE_DEPEX_FINISH = _(
+ "Checking to see if module depex met by workspace ... Done.")
+MSG_CHECK_PACKAGE_START = _(
+ "\nChecking to see if package exists in workspace started ...")
+MSG_CHECK_PACKAGE_FINISH = _(
+ "Checking to see if package exists in workspace ... Done.")
+MSG_CHECK_DP_START = \
+ _("\nChecking to see if DP exists in workspace ... Done.")
+MSG_CHECK_DP_FINISH = _("Check DP exists in workspace ... Done.")
+MSG_MODULE_DEPEND_ON = _("Module %s depends on Package %s")
+MSG_INIT_IPI_START = _("\nInitialize IPI database started ...")
+MSG_INIT_IPI_FINISH = _("Initialize IPI database ... Done.")
+MSG_GET_DP_INSTALL_LIST = _(
+ "\nGetting list of DP install information started ...")
+MSG_GET_DP_INSTALL_INFO_START = _(
+ "\nGetting list of DP install information started ...")
+MSG_GET_DP_INSTALL_INFO_FINISH = _("Getting DP install information ... Done.")
+MSG_UZIP_PARSE_XML = _(
+ "Unzipping and parsing distribution package XML file ... ")
+MSG_INSTALL_PACKAGE = _("Installing package ... %s")
+MSG_INSTALL_MODULE = _("Installing module ... %s")
+MSG_NEW_FILE_NAME_FOR_DIST = _(
+ "Provide new filename for distribution file to be saved:\n")
+MSG_UPDATE_PACKAGE_DATABASE = _("Update Distribution Package Database ...")
+MSG_PYTHON_ON = _("(Python %s on %s) ")
+MSG_EDKII_MAIL_ADDR = 'devel@edk2.groups.io'
+MSG_SEARCH_FOR_HELP = _(
+ "\n(Please send email to %s for\n"
+ " help, attach the following call stack trace.)\n")
+MSG_REMOVE_TEMP_FILE_STARTED = _("Removing temp files started ... ")
+MSG_REMOVE_TEMP_FILE_DONE = _("Removing temp files ... Done.")
+MSG_FINISH = _("Successfully Done.")
+MSG_COMPRESS_DISTRIBUTION_PKG = _("Compressing Distribution Package File ...")
+MSG_CONFIRM_REMOVE = _(
+ "Some packages or modules depend on this distribution package.\n"
+ "Do you really want to remove it?")
+MSG_CONFIRM_REMOVE2 = _(
+ "This file has been modified: %s. Do you want to remove it?"
+ "Press Y to remove or other key to keep it")
+MSG_CONFIRM_REMOVE3 = _(
+ "This is a newly created file: %s. Are you sure you want to remove it? "
+ "Press Y to remove or any other key to keep it")
+MSG_USER_DELETE_OP = _(
+ "Press Y to delete all files or press any other key to quit:")
+MSG_REMOVE_FILE = _("Removing file: %s ...")
+
+MSG_INITIALIZE_ECC_STARTED = _("\nInitialize ECC database started ...")
+MSG_INITIALIZE_ECC_DONE = _("Initialize ECC database ... Done.")
+MSG_DEFINE_STATEMENT_FOUND = _("DEFINE statement '%s' found in section %s")
+MSG_PARSING = _("Parsing %s ...")
+
+MSG_REPKG_CONFLICT = \
+_("Repackaging is not allowed on this file: %s. "
+ "It was installed from distribution %s(Guid %s Version %s).")
+
+MSG_INVALID_MODULE_INTRODUCED = _("Some modules are not valid after removal.")
+MSG_CHECK_LOG_FILE = _("Please check log file %s for the full list")
+MSG_NEW_FILE_NAME = _(
+ "Provide new filename:\n")
+MSG_RELATIVE_PATH_ONLY = _("Please specify a relative path, full path is not allowed: %s")
+MSG_NEW_PKG_PATH = _(
+ "Select package location. To quit with no input, press [Enter].")
+MSG_CHECK_DP_FOR_REPLACE = _("Verifying the dependency rule for replacement of distributions:\n %s replaces %s")
+MSG_CHECK_DP_FOR_INSTALL = _("Verifying the dependency rule for installation of distribution:\n %s")
+MSG_REPLACE_ALREADY_INSTALLED_DP = _("Distribution with the same GUID/Version is already installed, "
+ "replace would result in two instances, which is not allowed")
+MSG_RECOVER_START = _('An error was detected, recovery started ...')
+MSG_RECOVER_DONE = _('Recovery completed.')
+MSG_RECOVER_FAIL = _('Recovery failed.')
+#
+# Error related strings.
+#
+
+ERR_DEPENDENCY_NOT_MATCH = _(
+ "Module %s's dependency on package %s (GUID %s Version %s) "
+ "cannot be satisfied")
+ERR_MODULE_NOT_INSTALLED = _(
+ "This module is not installed in the workspace: %s\n")
+ERR_DIR_ALREADY_EXIST = _(
+ "This directory already exists: %s.\n"
+ "Select another location. Press [Enter] with no input to quit:")
+ERR_USER_INTERRUPT = _("The user has interrupted the application")
+ERR_DIST_FILE_TOOMANY = _(
+ "Only one .content and one .pkg file in ZIP file are allowed.")
+ERR_DIST_FILE_TOOFEW = _(
+ "Must have one .content and one .pkg file in the ZIP file.")
+ERR_FILE_ALREADY_EXIST = _(
+ "This file already exists: %s.\n"
+ "Select another path to continue. To quit with no input press [Enter]:")
+ERR_SPECIFY_PACKAGE = _(
+ "One distribution package must be specified")
+ERR_FILE_BROKEN = _(
+ "This file is invalid in the distribution package: %s")
+ERR_PACKAGE_NOT_MATCH_DEPENDENCY = _(
+ "This distribution package does not meet the dependency requirements")
+ERR_UNKNOWN_FATAL_INSTALL_ERR = \
+_("Unknown unrecoverable error when installing: %s")
+ERR_UNKNOWN_FATAL_REPLACE_ERR = \
+_("Unknown unrecoverable error during replacement of distributions: %s replaces %s")
+ERR_OPTION_NOT_FOUND = _("Options not found")
+ERR_INVALID_PACKAGE_NAME = _("Incorrect package name: %s. ")
+ERR_INVALID_PACKAGE_PATH = \
+_("Incorrect package path: %s. The path must be a relative path.")
+ERR_NOT_FOUND = _("This was not found: %s")
+ERR_INVALID_MODULE_NAME = _("This is not a valid module name: %s")
+ERR_INVALID_METAFILE_PATH = _('This file must be in sub-directory of WORKSPACE: %s.')
+ERR_INVALID_MODULE_PATH = \
+_("Incorrect module path: %s. The path must be a relative path.")
+ERR_UNKNOWN_FATAL_CREATING_ERR = _("Unknown error when creating: %s")
+ERR_PACKAGE_NOT_INSTALLED = _(
+ "This distribution package not installed: %s")
+ERR_DISTRIBUTION_NOT_INSTALLED = _(
+ "The distribution package is not installed.")
+ERR_UNKNOWN_FATAL_REMOVING_ERR = _("Unknown error when removing package")
+ERR_UNKNOWN_FATAL_INVENTORYWS_ERR = _("Unknown error when inventorying WORKSPACE")
+ERR_NOT_CONFIGURE_WORKSPACE_ENV = _(
+ "The WORKSPACE environment variable must be configured.")
+ERR_NO_TEMPLATE_FILE = _("This package information data file is not found: %s")
+ERR_DEBUG_LEVEL = _(
+ "Not supported debug level. Use default level instead.")
+ERR_REQUIRE_T_OPTION = _(
+ "Option -t is required during distribution creation.")
+ERR_REQUIRE_O_OPTION = _(
+ "Option -o is required during distribution replacement.")
+ERR_REQUIRE_U_OPTION = _(
+ "Option -u is required during distribution replacement.")
+ERR_REQUIRE_I_C_R_OPTION = _(
+ "Options -i, -c and -r are mutually exclusive.")
+ERR_I_C_EXCLUSIVE = \
+_("Option -c and -i are mutually exclusive.")
+ERR_I_R_EXCLUSIVE = \
+_("Option -i and -r are mutually exclusive.")
+ERR_C_R_EXCLUSIVE = \
+_("Option -c and -r are mutually exclusive.")
+ERR_U_ICR_EXCLUSIVE = \
+_("Option -u and -c/-i/-r are mutually exclusive.")
+
+ERR_L_OA_EXCLUSIVE = \
+_("Option -l and -c/-i/-r/-u are mutually exclusive.")
+
+ERR_FAILED_LOAD = _("Failed to load %s\n\t%s")
+ERR_PLACEHOLDER_DIFFERENT_REPEAT = _(
+ "${%s} has different repeat time from others.")
+ERR_KEY_NOTALLOWED = _("This keyword is not allowed: %s")
+ERR_NOT_FOUND_ENVIRONMENT = _("Environment variable not found")
+ERR_WORKSPACE_NOTEXIST = _("WORKSPACE doesn't exist")
+ERR_SPACE_NOTALLOWED = _(
+ "Whitespace characters are not allowed in the WORKSPACE path. ")
+ERR_MACRONAME_NOGIVEN = _("No MACRO name given")
+ERR_MACROVALUE_NOGIVEN = _("No MACRO value given")
+ERR_MACRONAME_INVALID = _("Incorrect MACRO name: %s")
+ERR_MACROVALUE_INVALID = _("Incorrect MACRO value: %s")
+ERR_NAME_ONLY_DEFINE = _(
+ "This variable can only be defined via environment variable: %s")
+ERR_EDK_GLOBAL_SAMENAME = _(
+ "EDK_GLOBAL defined a macro with the same name as one defined by 'DEFINE'")
+ERR_SECTIONNAME_INVALID = _(
+ "An incorrect section name was found: %s. 'The correct file is '%s' .")
+ERR_CHECKFILE_NOTFOUND = _(
+ "Can't find file '%s' defined in section '%s'")
+ERR_INVALID_NOTFOUND = _(
+ "Incorrect statement '%s' was found in section '%s'")
+ERR_TEMPLATE_NOTFOUND = _("This package information data file is not found: %s")
+ERR_SECTION_NAME_INVALID = _('Incorrect section name: %s')
+ERR_SECTION_REDEFINE = _(
+ "This section already defined: %s.")
+ERR_SECTION_NAME_NONE = \
+ _('The section needs to be specified first.')
+ERR_KEYWORD_INVALID = _('Invalid keyword: %s')
+ERR_VALUE_INVALID = _("Invalid \"%s\" value in section [%s].")
+ERR_FILELIST_LOCATION = _(
+ 'The directory "%s" must contain this file: "%s".')
+ERR_KEYWORD_REDEFINE = _(
+ "Keyword in this section can only be used once: %s.")
+ERR_FILELIST_EXIST = _(
+ 'This file does not exist: %s.')
+ERR_COPYRIGHT_CONTENT = _(
+ "The copyright content must contain the word \"Copyright\" (case insensitive).")
+ERR_WRONG_FILELIST_FORMAT = \
+_('File list format is incorrect. '
+ 'The correct format is: filename|key=value[|key=value]')
+ERR_FILELIST_ATTR = _(
+ "The value of attribute \"%s\" includes illegal character.")
+ERR_UNKNOWN_FILELIST_ATTR = _(
+ 'Unknown attribute name: %s.')
+ERR_EMPTY_VALUE = _("Empty value is not allowed")
+ERR_KEYWORD_MANDATORY = _('This keyword is mandatory: %s')
+ERR_BOOLEAN_VALUE = _(
+ 'Value of key [%s] must be true or false, current: [%s]')
+ERR_GUID_VALUE = _(
+ 'GUID must have the format of 8-4-4-4-12 with HEX value. '
+ 'Current value: [%s]')
+ERR_VERSION_VALUE = _(
+ 'The value of key [%s] must be a decimal number. Found: [%s]')
+ERR_VERSION_XMLSPEC = _(
+ 'XmlSpecification value must be 1.1, current: %s.')
+
+ERR_INVALID_GUID = _("Incorrect GUID value string: %s")
+
+ERR_FILE_NOT_FOUND = \
+ _("File or directory not found in workspace")
+ERR_FILE_OPEN_FAILURE = _("Could not open file")
+ERR_FILE_WRITE_FAILURE = _("Could not write file")
+ERR_FILE_PARSE_FAILURE = _("Could not parse file")
+ERR_FILE_READ_FAILURE = _("Could not read file")
+ERR_FILE_CREATE_FAILURE = _("Could not create file")
+ERR_FILE_CHECKSUM_FAILURE = _("Checksum of file is incorrect")
+ERR_FILE_COMPRESS_FAILURE = _("File compression did not complete correctly")
+ERR_FILE_DECOMPRESS_FAILURE = \
+ _("File decompression did not complete correctly")
+ERR_FILE_MOVE_FAILURE = _("File move did not complete successfully")
+ERR_FILE_DELETE_FAILURE = _("File could not be deleted")
+ERR_FILE_COPY_FAILURE = _("File did not copy correctly")
+ERR_FILE_POSITIONING_FAILURE = _("Could not find file seek position")
+ERR_FILE_TYPE_MISMATCH = _("Incorrect file type")
+ERR_FILE_CASE_MISMATCH = _("File name case mismatch")
+ERR_FILE_DUPLICATED = _("Duplicate file found")
+ERR_FILE_UNKNOWN_ERROR = _("Unknown error encountered on file")
+ERR_FILE_NAME_INVALIDE = _("This file name is invalid; it must not be an absolute path or "
+ "contain a period \".\" or \"..\": %s.")
+ERR_OPTION_UNKNOWN = _("Unknown option")
+ERR_OPTION_MISSING = _("Missing option")
+ERR_OPTION_CONFLICT = _("Options conflict")
+ERR_OPTION_VALUE_INVALID = _("Invalid option value")
+ERR_OPTION_DEPRECATED = _("Deprecated option")
+ERR_OPTION_NOT_SUPPORTED = _("Unsupported option")
+ERR_OPTION_UNKNOWN_ERROR = _("Unknown error when processing options")
+ERR_PARAMETER_INVALID = _("Invalid parameter")
+ERR_PARAMETER_MISSING = _("Missing parameter")
+ERR_PARAMETER_UNKNOWN_ERROR = _("Unknown error in parameters")
+ERR_FORMAT_INVALID = _("Invalid syntax/format")
+ERR_FORMAT_NOT_SUPPORTED = _("Syntax/format not supported")
+ERR_FORMAT_UNKNOWN = _("Unknown format")
+ERR_FORMAT_UNKNOWN_ERROR = _("Unknown error in syntax/format")
+ERR_RESOURCE_NOT_AVAILABLE = _("Not available")
+ERR_RESOURCE_ALLOCATE_FAILURE = _("A resource allocation has failed")
+ERR_RESOURCE_FULL = _("Full")
+ERR_RESOURCE_OVERFLOW = _("Overflow")
+ERR_RESOURCE_UNDERRUN = _("Underrun")
+ERR_RESOURCE_UNKNOWN_ERROR = _("Unknown error")
+ERR_ATTRIBUTE_NOT_AVAILABLE = _("Not available")
+ERR_ATTRIBUTE_RETRIEVE_FAILURE = _("Unable to retrieve")
+ERR_ATTRIBUTE_SET_FAILURE = _("Unable to set")
+ERR_ATTRIBUTE_UPDATE_FAILURE = _("Unable to update")
+ERR_ATTRIBUTE_ACCESS_DENIED = _("Access denied")
+ERR_ATTRIBUTE_UNKNOWN_ERROR = _("Unknown error when accessing")
+ERR_COMMAND_FAILURE = _("Unable to execute command")
+ERR_IO_NOT_READY = _("Not ready")
+ERR_IO_BUSY = _("Busy")
+ERR_IO_TIMEOUT = _("Timeout")
+ERR_IO_UNKNOWN_ERROR = _("Unknown error in IO operation")
+ERR_UNKNOWN_ERROR = _("Unknown error")
+ERR_UPT_ALREADY_INSTALLED_ERROR = _("Already installed")
+ERR_UPT_ENVIRON_MISSING_ERROR = _("Environ missing")
+ERR_UPT_REPKG_ERROR = _("File not allowed for RePackage")
+ERR_UPT_DB_UPDATE_ERROR = _("Update database did not complete successfully")
+ERR_UPT_INI_PARSE_ERROR = _("INI file parse error")
+ERR_COPYRIGHT_MISSING = \
+_("Header comment section must have copyright information")
+ERR_LICENSE_MISSING = \
+_("Header comment section must have license information")
+ERR_INVALID_BINARYHEADER_FORMAT = \
+_("Binary Header comment section must have abstract,description,copyright,license information")
+ERR_MULTIPLE_BINARYHEADER_EXIST = \
+_("the inf file at most support one BinaryHeader at the fileheader section.")
+ERR_INVALID_COMMENT_FORMAT = _("Comment must start with #")
+ERR_USER_ABORT = _("User has stopped the application")
+ERR_DIST_EXT_ERROR = \
+_("Distribution file extension should be '.dist'. Current given: '%s'.")
+ERR_DIST_FILENAME_ONLY_FOR_REMOVE = \
+_("Only distribution filename without path allowed during remove. Current given: '%s'.")
+ERR_NOT_STANDALONE_MODULE_ERROR = \
+ _("Module %s is not a standalone module (found in Package %s)")
+ERR_UPT_ALREADY_RUNNING_ERROR = \
+ _("UPT is already running, only one instance is allowed")
+ERR_MUL_DEC_ERROR = _("Multiple DEC files found within one package directory tree %s: %s, %s")
+ERR_INSTALL_FILE_FROM_EMPTY_CONTENT = _("The file to be installed was not found in the content file: %s")
+ERR_INSTALL_FILE_DEC_FILE_ERROR = _("Could not obtain the TokenSpaceGuidCName and the PcdCName from the DEC files "
+"that the package depends on for this PCD entry: TokenValue: %s Token: %s")
+ERR_NOT_SUPPORTED_SA_MODULE = _("Stand-alone module distribution does not allow EDK 1 INF")
+ERR_INSTALL_DIST_NOT_FOUND = \
+_("Distribution file to be installed is not found in current working directory or workspace: %s")
+ERR_REPLACE_DIST_NOT_FOUND = \
+_("Distribution file for replace function was not found in the current working directory or workspace: %s")
+ERR_DIST_FILENAME_ONLY_FOR_REPLACE_ORIG = \
+_("Only a distribution file name without a path is allowed for "
+ "the distribution to be replaced during replace. Current given: '%s'.")
+ERR_UNIPARSE_DBLQUOTE_UNMATCHED = \
+_("Only Language entry can contain a couple of matched quote in one line")
+ERR_UNIPARSE_NO_SECTION_EXIST = _("No PackageDef or ModuleDef section exists in the UNI file.")
+ERR_UNIPARSE_STRNAME_FORMAT_ERROR = _("The String Token Name %s must start with \"STR_\"")
+ERR_UNIPARSE_SEP_LANGENTRY_LINE = _("Each <LangEntry> should be on a separate line: %s.")
+ERR_UNIPARSE_MULTI_ENTRY_EXIST = \
+_("There are same entries : %s in the UNI file, every kind of entry should be only one.")
+ERR_UNIPARSE_ENTRY_ORDER_WRONG = \
+_("The string entry order in UNI file should be <AbstractStrings>, <DescriptionStrings>, \
+<BinaryAbstractStrings>, <BinaryDescriptionStrings>.")
+ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR = _("The String Token Type %s must be one of the '_PROMPT', '_HELP' and '_ERR_'.")
+ERR_UNIPARSE_LINEFEED_UNDER_EXIST = _("A line feed should not exist below this line: %s.")
+ERR_UNIPARSE_LINEFEED_UP_EXIST = _("A line feed should not exist above this line: %s.")
+ERR_UNI_MISS_STRING_ENTRY = _("A string entry is missing in this entry: %s.")
+ERR_UNI_MISS_LANGENTRY = _("A language entry is missing in this entry: %s.")
+ERR_BINARY_HEADER_ORDER = _("Binary header must follow the file header.")
+ERR_NO_SOURCE_HEADER = _("File header statement \"## @file\" must appear at the beginning of the file.")
+ERR_UNI_FILE_SUFFIX_WRONG = _("The UNI file must have an extension of '.uni', '.UNI' or '.Uni'")
+ERR_UNI_FILE_NAME_INVALID = _("The use of '..', '../' and './' in the UNI file is prohibited.")
+ERR_UNI_SUBGUID_VALUE_DEFINE_DEC_NOT_FOUND = _("There is no DEC file defining the GUID value for \
+this GUID CName: '%s'.")
+
+#
+# Expression error message
+#
+ERR_EXPR_RIGHT_PAREN = \
+_('Missing ")" in expression "%s".')
+ERR_EXPR_FACTOR = \
+_('"%s" is expected to be HEX, integer, macro, quoted string or PcdName in '
+ 'expression "%s".')
+ERR_EXPR_STRING_ITEM = \
+_('"%s" is expected to be HEX, integer, macro, quoted string or PcdName in '
+ 'expression [%s].')
+ERR_EXPR_EQUALITY = \
+_('"%s" is expected to be ==, EQ, != or NE in expression "%s".')
+ERR_EXPR_BOOLEAN = \
+_('The string "%s" in expression "%s" can not be recognized as a part of the logical expression.')
+ERR_EXPR_EMPTY = _('Boolean value cannot be empty.')
+ERR_EXPRESS_EMPTY = _('Expression can not be empty.')
+ERR_EXPR_LOGICAL = \
+_('The following is not a valid logical expression: "%s".')
+ERR_EXPR_OR = _('The expression: "%s" must be encapsulated in open "(" and close ")" '
+ 'parentheses when using | or ||.')
+ERR_EXPR_RANGE = \
+_('The following is not a valid range expression: "%s".')
+ERR_EXPR_RANGE_FACTOR = \
+_('"%s" is expected to be HEX, integer in valid range expression "%s".')
+ERR_EXPR_RANGE_DOUBLE_PAREN_NESTED = \
+_('Nested double parentheses are not allowed in a valid range expression: "%s".')
+ERR_EXPR_RANGE_EMPTY = _('A valid range cannot be empty.')
+ERR_EXPR_LIST_EMPTY = _('A valid list cannot be empty.')
+ERR_PAREN_NOT_USED = _('Parentheses must be used on both sides of "OR", "AND" in a valid range: %s.')
+ERR_EXPR_LIST = \
+_('The following is not a valid list expression: "%s".')
+
+
+#
+# DEC parser error message
+#
+ERR_DECPARSE_STATEMENT_EMPTY = \
+_('Must have at least one statement in section %s.')
+ERR_DECPARSE_DEFINE_DEFINED = \
+_('%s is already defined in the define section.')
+ERR_DECPARSE_DEFINE_SECNAME = \
+_('A define section name must not be followed by an arch or other qualifiers.')
+ERR_DECPARSE_DEFINE_MULTISEC = \
+_('The DEC file does not allow multiple define sections.')
+ERR_DECPARSE_DEFINE_REQUIRED = \
+_("Field [%s] is required in define section.")
+ERR_DECPARSE_DEFINE_FORMAT = \
+_("Wrong define section format, must be KEY = Value.")
+ERR_DECPARSE_DEFINE_UNKNOWKEY = \
+_("Unknown key [%s] in define section.")
+ERR_DECPARSE_DEFINE_SPEC = \
+_("Specification value must be HEX numbers or decimal numbers.")
+ERR_DECPARSE_DEFINE_PKGNAME = \
+_("Package name must be AlphaNumeric characters.")
+ERR_DECPARSE_DEFINE_PKGGUID = \
+_("GUID format error, must be HEX value with form 8-4-4-4-12.")
+ERR_DECPARSE_DEFINE_PKGVERSION = \
+_("Version number must be decimal number.")
+ERR_DECPARSE_DEFINE_PKGVUNI = \
+_("UNI file name format error or file does not exist.")
+ERR_DECPARSE_INCLUDE = \
+_("Incorrect path: [%s].")
+ERR_DECPARSE_LIBCLASS_SPLIT = \
+_("Library class format error, must be Libraryclass|Headerpath.")
+ERR_DECPARSE_LIBCLASS_EMPTY = \
+_("Class name or file name must not be empty.")
+ERR_DECPARSE_LIBCLASS_LIB = \
+_("Class name format error, must start with upper case letter followed with "
+ "zero or more alphanumeric characters.")
+ERR_DECPARSE_LIBCLASS_PATH_EXT = _("File name must be end with .h.")
+ERR_DECPARSE_LIBCLASS_PATH_DOT = _("Path must not include '..'.")
+ERR_DECPARSE_LIBCLASS_PATH_EXIST = _("File name [%s] does not exist.")
+ERR_DECPARSE_PCD_CVAR_GUID = \
+_("TokenSpaceGuidCName must be valid C variable format.")
+ERR_DECPARSE_PCD_SPLIT = \
+_("Incorrect PcdName. The format must be TokenSpaceGuidCName.PcdCName"
+ "|PcdData|PcdType|Token.")
+ERR_DECPARSE_PCD_NAME = \
+_("Incorrect PCD name. The correct format must be "
+ "<TokenSpaceGuidCName>.<PcdCName>.")
+ERR_DECPARSE_PCD_CVAR_PCDCNAME = \
+_("PcdCName must be valid C variable format.")
+ERR_DECPARSE_PCD_TYPE = \
+_('Incorrect PCD data type. A PCD data type must be one of '
+ '"UINT8", "UINT16", "UINT32", "UINT64", "VOID*", "BOOLEAN".')
+ERR_DECPARSE_PCD_VOID = \
+_("Incorrect value [%s] of type [%s]. Value must be printable and in the "
+ "form of{...} for array, or ""..."" for string, or L""..."""
+ "for unicode string.")
+ERR_DECPARSE_PCD_VALUE_EMPTY = \
+_("Pcd value can not be empty.")
+ERR_DECPARSE_PCD_BOOL = \
+_("Invalid value [%s] of type [%s]; must be expression, TRUE, FALSE, 0 or 1.")
+ERR_DECPARSE_PCD_INT = _("Incorrect value [%s] of type [%s]."\
+" Value must be a hexadecimal, decimal or octal in C language format.")
+ERR_DECPARSE_PCD_INT_NEGTIVE = _("Incorrect value [%s] of type [%s];"
+ " must not be signed number.")
+ERR_DECPARSE_PCD_INT_EXCEED = _("Incorrect value [%s] of type [%s]; "
+ "the number is too long for this type.")
+ERR_DECPARSE_PCD_FEATUREFLAG = \
+_("PcdFeatureFlag only allow BOOLEAN type.")
+ERR_DECPARSE_PCD_TOKEN = \
+_("An incorrect PCD token found: [%s]. "
+ "It must start with 0x followed by 1 - 8 hexadecimal. ")
+ERR_DECPARSE_PCD_TOKEN_INT = _("Incorrect token number [%s]. "
+ "This token number exceeds the maximal value of unsigned 32.")
+ERR_DECPARSE_PCD_TOKEN_UNIQUE = _("Token number must be unique to the token space: %s.")
+ERR_DECPARSE_CGUID = \
+_("No GUID name or value specified, must be <CName> = <GuidValueInCFormat>.")
+ERR_DECPARSE_CGUID_NAME = \
+_("No GUID name specified, must be <CName> = <GuidValueInCFormat>.")
+ERR_DECPARSE_CGUID_GUID = \
+_("No GUID value specified, must be <CName> = <GuidValueInCFormat>.")
+ERR_DECPARSE_CGUID_GUIDFORMAT = \
+_("Incorrect GUID value format, must be <GuidValueInCFormat:"
+ "{8,4,4,{2,2,2,2,2,2,2,2}}>.")
+ERR_DECPARSE_CGUID_NOT_FOUND = _("Unable to find the GUID value of this GUID CName : '%s'.")
+ERR_DECPARSE_FILEOPEN = _("Unable to open: [%s].")
+ERR_DECPARSE_SECTION_EMPTY = _("Empty sections are not allowed.")
+ERR_DECPARSE_SECTION_UE = _("Incorrect UserExtensions format. "
+ "Must be UserExtenxions.UserId.IdString[.Arch]+.")
+ERR_DECPARSE_SECTION_UE_USERID = _("Invalid UserId; it must consist of underscore "
+ "or alphanumeric characters.")
+ERR_DECPARSE_SECTION_UE_IDSTRING = \
+ _("Incorrect IdString, must be \" ... \".")
+ERR_DECPARSE_ARCH = \
+_("Unknown arch, must be 'common' or start with upper case letter followed by"
+ " zero or more upper case letters and numbers.")
+ERR_DECPARSE_SECTION_COMMA = _("Section cannot end with comma.")
+ERR_DECPARSE_SECTION_COMMON = \
+_("'COMMON' must not be used with specific ARCHs in the same section.")
+ERR_DECPARSE_SECTION_IDENTIFY = \
+_("Section header must start with and end with brackets[].")
+ERR_DECPARSE_SECTION_SUBEMPTY = \
+_("Missing a sub-section name in section: [%s]. "
+ "All sub-sections need to have names. ")
+ERR_DECPARSE_SECTION_SUBTOOMANY = _("Too many DOT splits in [%s].")
+ERR_DECPARSE_SECTION_UNKNOW = _("Section name [%s] unknown.")
+ERR_DECPARSE_SECTION_FEATUREFLAG = \
+_("[%s] must not be in the same section as other types of PCD.")
+ERR_DECPARSE_MACRO_PAIR = _("No macro name/value given.")
+ERR_DECPARSE_MACRO_NAME = _("No macro name given.")
+ERR_DECPARSE_MACRO_NAME_UPPER = \
+_("Macro name must start with upper case letter followed "
+"by zero or more upper case letters or numbers. Current macro name is: [%s].")
+ERR_DECPARSE_SECTION_NAME = \
+_('Cannot mix different section names %s.')
+ERR_DECPARSE_BACKSLASH = \
+_('Backslash must be the last character on a line and '
+ 'preceded by a space character.')
+ERR_DECPARSE_BACKSLASH_EMPTY = \
+_('An empty line is not allowed after a line that ends with a backslash.')
+ERR_DECPARSE_REDEFINE = _(
+ "\"%s\" already defined in line %d.")
+ERR_DECPARSE_MACRO_RESOLVE = _("Macro %s in %s cannot be resolved.")
+ERR_DECPARSE_UE_DUPLICATE = \
+ _("Duplicated UserExtensions header found.")
+ERR_DECPARSE_PCDERRORMSG_MISS_VALUE_SPLIT = \
+ _("Missing '|' between Pcd's error code and Pcd's error message.")
+ERR_DECPARSE_PCD_MISS_ERRORMSG = \
+ _("Missing Pcd's error message.")
+ERR_DECPARSE_PCD_UNMATCHED_ERRORCODE = \
+ _("There is no error message matched with this Pcd error code : %s in both DEC and UNI file.")
+ERR_DECPARSE_PCD_NODEFINED = _("The PCD : %s used in the Expression is undefined.")
+#
+# Used to print the content of the current line that caused the error.
+# It is appended to the end of every error message above.
+#
+ERR_DECPARSE_LINE = _(" Parsing line: \"%s\".")
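+# Illustrative sketch (assumed usage; 'CurrentLine' is a hypothetical
+# variable): a parser reporting one of the errors above would append this
+# string to the message, e.g.
+#   ErrorText = ERR_DECPARSE_MACRO_NAME + (ERR_DECPARSE_LINE % CurrentLine)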
+
+#
+# Warning related strings.
+#
+WRN_PACKAGE_EXISTED = _(
+ "A package with this GUID and Version already exists: "
+ "GUID %s, Version %s.")
+WRN_MODULE_EXISTED = _("This module already exists: %s")
+WRN_FILE_EXISTED = _("This file already exists: %s")
+WRN_FILE_NOT_OVERWRITTEN = \
+_("This file already exist and cannot be overwritten: %s")
+WRN_DIST_PKG_INSTALLED = _("This distribution package %s has previously been installed.")
+WRN_DIST_NOT_FOUND = _(
+ "Distribution is not found at location %s")
+WRN_MULTI_PCD_RANGES = _(
+ "A PCD can only have one type of @ValidRange, @ValidList, and @Expression comment")
+WRN_MULTI_PCD_VALIDVALUE = _(
+ "A PCD can only have one of @ValidList comment")
+WRN_MULTI_PCD_PROMPT = _(
+ "A PCD can only have one of @Prompt comment")
+WRN_MISSING_USAGE = _("Missing usage")
+WRN_INVALID_GUID_TYPE = _("This is an incorrect Guid type: %s")
+WRN_MISSING_GUID_TYPE = _("Missing Guid Type")
+WRN_INVALID_USAGE = _("This is an incorrect Usage: %s")
+WRN_INF_PARSER_MODULE_INVALID_HOB_TYPE = \
+ _("This is an incorrect HOB type: %s")
+WRN_INF_PARSER_MODULE_INVALID_EVENT_TYPE = \
+ _("This is an incorrect EVENT type: %s")
+WRN_INF_PARSER_MODULE_INVALID_BOOTMODE_TYPE = \
+ _("This is an incorrect BOOTMODE type: %s")
+WRN_INVALID_MODULE_TYPE = \
+ _("This is an incorrect Module type: %s")
+WRN_MODULE_PARSE_FAILED = \
+ _("Parsing of this module did not complete correctly: %s.")
+WRN_EDK1_INF_FOUND = \
+ _("EDK 1 module file found: %s")
+WRN_INVALID_COPYRIGHT = \
+ _("Copyright information is not right")
+WARN_SPECIAL_SECTION_LOCATION_WRONG = _("Warning. A special section should be "
+ "at the end of a file or at the end of a section.")
+WARN_INSTALLED_PACKAGE_NOT_FOUND = \
+ _("File not found. The DEC file for a package cannot be found in GUID/Version/Install path: %s %s %s")
+WARN_CUSTOMPATH_OVERRIDE_USEGUIDEDPATH = \
+ _("option selection of --custom-path will override the option --use-guided-paths")
+
+#
+# Help related strings.
+#
+HLP_PRINT_DEBUG_INFO = _(
+ "Print DEBUG statements, where DEBUG_LEVEL is 0-9")
+HLP_PRINT_INFORMATIONAL_STATEMENT = _("Print informational statements")
+HLP_RETURN_NO_DISPLAY = _(
+ "Returns only the exit code, informational and error messages are"
+ " not displayed")
+HLP_RETURN_AND_DISPLAY = _(
+ "Returns the exit code and displays error messages only")
+HLP_SPECIFY_PACKAGE_NAME_INSTALL = _(
+ "Specify the UEFI Distribution Package filename to install")
+HLP_SPECIFY_PACKAGE_NAME_CREATE = _(
+ "Specify the UEFI Distribution Package filename to create")
+HLP_SPECIFY_PACKAGE_NAME_REMOVE = _(
+ "Specify the UEFI Distribution Package filename to remove")
+HLP_SPECIFY_TEMPLATE_NAME_CREATE = _(
+ "Specify Package Information Data filename to create package")
+HLP_SPECIFY_DEC_NAME_CREATE = _(
+ "Specify dec file names to create package")
+HLP_SPECIFY_INF_NAME_CREATE = _(
+ "Specify inf file names to create package")
+HLP_LIST_DIST_INSTALLED = _(
+ "List the UEFI Distribution Packages that have been installed")
+HLP_NO_SUPPORT_GUI = _(
+ "Starting the tool in graphical mode is not supported in this version")
+HLP_DISABLE_PROMPT = _(
+ "Disable user prompts for removing modified files. Valid only when -r is present")
+HLP_CUSTOM_PATH_PROMPT = _(
+ "Enable user prompting for alternate installation directories")
+HLP_SKIP_LOCK_CHECK = _(
+ "Skip the check for multiple instances")
+HLP_SPECIFY_PACKAGE_NAME_REPLACE = _(
+ "Specify the UEFI Distribution Package file name to replace the existing file name")
+HLP_SPECIFY_PACKAGE_NAME_TO_BE_REPLACED = _(
+ "Specify the UEFI Distribution Package file name to be replaced")
+HLP_USE_GUIDED_PATHS = _(
+ "Install packages to the following directory path by default: <PackageName>_<PACKAGE_GUID>_<PACKAGE_VERSION>")
+HLP_TEST_INSTALL = _(
+ "Specify the UEFI Distribution Package filenames to install")
+
+MSG_TEST_INSTALL_PASS = _("All distribution package files satisfy the dependency check.")
+MSG_TEST_INSTALL_FAIL = _("NOT all distribution package files satisfy the dependency check.")
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/ToolError.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/ToolError.py
new file mode 100644
index 00000000..f4cf1989
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/ToolError.py
@@ -0,0 +1,171 @@
+## @file
+# Standardized Error Handling infrastructures.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+ToolError
+'''
+
+import Logger.StringTable as ST
+
+FILE_OPEN_FAILURE = 1
+FILE_WRITE_FAILURE = 2
+FILE_PARSE_FAILURE = 3
+FILE_READ_FAILURE = 4
+FILE_CREATE_FAILURE = 5
+FILE_CHECKSUM_FAILURE = 6
+FILE_COMPRESS_FAILURE = 7
+FILE_DECOMPRESS_FAILURE = 8
+FILE_MOVE_FAILURE = 9
+FILE_DELETE_FAILURE = 10
+FILE_COPY_FAILURE = 11
+FILE_POSITIONING_FAILURE = 12
+FILE_ALREADY_EXIST = 13
+FILE_NOT_FOUND = 14
+FILE_TYPE_MISMATCH = 15
+FILE_CASE_MISMATCH = 16
+FILE_DUPLICATED = 17
+FILE_UNKNOWN_ERROR = 0x0FFF
+
+OPTION_UNKNOWN = 0x1000
+OPTION_MISSING = 0x1001
+OPTION_CONFLICT = 0x1002
+OPTION_VALUE_INVALID = 0x1003
+OPTION_DEPRECATED = 0x1004
+OPTION_NOT_SUPPORTED = 0x1005
+OPTION_UNKNOWN_ERROR = 0x1FFF
+
+PARAMETER_INVALID = 0x2000
+PARAMETER_MISSING = 0x2001
+PARAMETER_UNKNOWN_ERROR = 0x2FFF
+
+FORMAT_INVALID = 0x3000
+FORMAT_NOT_SUPPORTED = 0x3001
+FORMAT_UNKNOWN = 0x3002
+FORMAT_UNKNOWN_ERROR = 0x3FFF
+
+RESOURCE_NOT_AVAILABLE = 0x4000
+RESOURCE_ALLOCATE_FAILURE = 0x4001
+RESOURCE_FULL = 0x4002
+RESOURCE_OVERFLOW = 0x4003
+RESOURCE_UNDERRUN = 0x4004
+RESOURCE_UNKNOWN_ERROR = 0x4FFF
+
+ATTRIBUTE_NOT_AVAILABLE = 0x5000
+ATTRIBUTE_GET_FAILURE = 0x5001
+ATTRIBUTE_SET_FAILURE = 0x5002
+ATTRIBUTE_UPDATE_FAILURE = 0x5003
+ATTRIBUTE_ACCESS_DENIED = 0x5004
+ATTRIBUTE_RETRIEVE_FAILURE = 0x5005
+ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF
+
+IO_NOT_READY = 0x6000
+IO_BUSY = 0x6001
+IO_TIMEOUT = 0x6002
+IO_UNKNOWN_ERROR = 0x6FFF
+
+COMMAND_FAILURE = 0x7000
+
+CODE_ERROR = 0xC0DE
+
+AUTOGEN_ERROR = 0xF000
+PARSER_ERROR = 0xF001
+BUILD_ERROR = 0xF002
+GENFDS_ERROR = 0xF003
+ECC_ERROR = 0xF004
+EOT_ERROR = 0xF005
+DDC_ERROR = 0xF009
+WARNING_AS_ERROR = 0xF006
+MIGRATION_ERROR = 0xF010
+EDK1_INF_ERROR = 0xF011
+ABORT_ERROR = 0xFFFE
+UNKNOWN_ERROR = 0xFFFF
+
+UPT_ALREADY_INSTALLED_ERROR = 0xD000
+UPT_ENVIRON_MISSING_ERROR = 0xD001
+UPT_REPKG_ERROR = 0xD002
+UPT_ALREADY_RUNNING_ERROR = 0xD003
+UPT_MUL_DEC_ERROR = 0xD004
+UPT_DB_UPDATE_ERROR = 0xD005
+UPT_INI_PARSE_ERROR = 0xE000
+
+## Error message of each error code
+#
+gERROR_MESSAGE = {
+ FILE_NOT_FOUND : ST.ERR_FILE_NOT_FOUND,
+ FILE_OPEN_FAILURE : ST.ERR_FILE_OPEN_FAILURE,
+ FILE_WRITE_FAILURE : ST.ERR_FILE_WRITE_FAILURE,
+ FILE_PARSE_FAILURE : ST.ERR_FILE_PARSE_FAILURE,
+ FILE_READ_FAILURE : ST.ERR_FILE_READ_FAILURE,
+ FILE_CREATE_FAILURE : ST.ERR_FILE_CREATE_FAILURE,
+ FILE_CHECKSUM_FAILURE : ST.ERR_FILE_CHECKSUM_FAILURE,
+ FILE_COMPRESS_FAILURE : ST.ERR_FILE_COMPRESS_FAILURE,
+ FILE_DECOMPRESS_FAILURE : ST.ERR_FILE_DECOMPRESS_FAILURE,
+ FILE_MOVE_FAILURE : ST.ERR_FILE_MOVE_FAILURE,
+ FILE_DELETE_FAILURE : ST.ERR_FILE_DELETE_FAILURE,
+ FILE_COPY_FAILURE : ST.ERR_FILE_COPY_FAILURE,
+ FILE_POSITIONING_FAILURE: ST.ERR_FILE_POSITIONING_FAILURE,
+ FILE_ALREADY_EXIST : ST.ERR_FILE_ALREADY_EXIST,
+ FILE_TYPE_MISMATCH : ST.ERR_FILE_TYPE_MISMATCH ,
+ FILE_CASE_MISMATCH : ST.ERR_FILE_CASE_MISMATCH,
+ FILE_DUPLICATED : ST.ERR_FILE_DUPLICATED,
+ FILE_UNKNOWN_ERROR : ST.ERR_FILE_UNKNOWN_ERROR,
+
+ OPTION_UNKNOWN : ST.ERR_OPTION_UNKNOWN,
+ OPTION_MISSING : ST.ERR_OPTION_MISSING,
+ OPTION_CONFLICT : ST.ERR_OPTION_CONFLICT,
+ OPTION_VALUE_INVALID : ST.ERR_OPTION_VALUE_INVALID,
+ OPTION_DEPRECATED : ST.ERR_OPTION_DEPRECATED,
+ OPTION_NOT_SUPPORTED : ST.ERR_OPTION_NOT_SUPPORTED,
+ OPTION_UNKNOWN_ERROR : ST.ERR_OPTION_UNKNOWN_ERROR,
+
+ PARAMETER_INVALID : ST.ERR_PARAMETER_INVALID,
+ PARAMETER_MISSING : ST.ERR_PARAMETER_MISSING,
+ PARAMETER_UNKNOWN_ERROR : ST.ERR_PARAMETER_UNKNOWN_ERROR,
+
+ FORMAT_INVALID : ST.ERR_FORMAT_INVALID,
+ FORMAT_NOT_SUPPORTED : ST.ERR_FORMAT_NOT_SUPPORTED,
+ FORMAT_UNKNOWN : ST.ERR_FORMAT_UNKNOWN,
+ FORMAT_UNKNOWN_ERROR : ST.ERR_FORMAT_UNKNOWN_ERROR,
+
+ RESOURCE_NOT_AVAILABLE : ST.ERR_RESOURCE_NOT_AVAILABLE,
+ RESOURCE_ALLOCATE_FAILURE : ST.ERR_RESOURCE_ALLOCATE_FAILURE,
+ RESOURCE_FULL : ST.ERR_RESOURCE_FULL,
+ RESOURCE_OVERFLOW : ST.ERR_RESOURCE_OVERFLOW,
+ RESOURCE_UNDERRUN : ST.ERR_RESOURCE_UNDERRUN,
+ RESOURCE_UNKNOWN_ERROR : ST.ERR_RESOURCE_UNKNOWN_ERROR,
+
+ ATTRIBUTE_NOT_AVAILABLE : ST.ERR_ATTRIBUTE_NOT_AVAILABLE,
+ ATTRIBUTE_RETRIEVE_FAILURE : ST.ERR_ATTRIBUTE_RETRIEVE_FAILURE,
+ ATTRIBUTE_SET_FAILURE : ST.ERR_ATTRIBUTE_SET_FAILURE,
+ ATTRIBUTE_UPDATE_FAILURE: ST.ERR_ATTRIBUTE_UPDATE_FAILURE,
+ ATTRIBUTE_ACCESS_DENIED : ST.ERR_ATTRIBUTE_ACCESS_DENIED,
+ ATTRIBUTE_UNKNOWN_ERROR : ST.ERR_ATTRIBUTE_UNKNOWN_ERROR,
+
+ COMMAND_FAILURE : ST.ERR_COMMAND_FAILURE,
+
+ IO_NOT_READY : ST.ERR_IO_NOT_READY,
+ IO_BUSY : ST.ERR_IO_BUSY,
+ IO_TIMEOUT : ST.ERR_IO_TIMEOUT,
+ IO_UNKNOWN_ERROR : ST.ERR_IO_UNKNOWN_ERROR,
+
+ UNKNOWN_ERROR : ST.ERR_UNKNOWN_ERROR,
+
+ UPT_ALREADY_INSTALLED_ERROR : ST.ERR_UPT_ALREADY_INSTALLED_ERROR,
+ UPT_ENVIRON_MISSING_ERROR : ST.ERR_UPT_ENVIRON_MISSING_ERROR,
+ UPT_REPKG_ERROR : ST.ERR_UPT_REPKG_ERROR,
+ UPT_ALREADY_RUNNING_ERROR : ST.ERR_UPT_ALREADY_RUNNING_ERROR,
+ UPT_MUL_DEC_ERROR : ST.ERR_MUL_DEC_ERROR,
+ UPT_INI_PARSE_ERROR : ST.ERR_UPT_INI_PARSE_ERROR,
+}
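+
+# Illustrative lookup sketch (assumed usage; GetErrorMessage is a
+# hypothetical helper, not part of this module): resolve the message for an
+# error code, falling back to the generic unknown-error string when the code
+# is not mapped.
+#
+# def GetErrorMessage(ErrorCode):
+#     return gERROR_MESSAGE.get(ErrorCode, ST.ERR_UNKNOWN_ERROR)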
+
+## Exception indicating a fatal error
+#
+class FatalError(Exception):
+ pass
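+
+# A FatalError is typically raised with an error code as its first argument,
+# e.g. raise FatalError(ABORT_ERROR); callers (such as MkPkg.Main) then read
+# XExcept.args[0] as the process return code.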
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/__init__.py
new file mode 100644
index 00000000..cb04e7ea
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Logger/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Logger' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as containing a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Logger
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/MkPkg.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/MkPkg.py
new file mode 100755
index 00000000..d700d057
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/MkPkg.py
@@ -0,0 +1,274 @@
+## @file
+# Install distribution package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+MkPkg
+'''
+
+##
+# Import Modules
+#
+from os import remove
+from os import getcwd
+from os import chdir
+import os.path
+from sys import stdin
+from sys import platform
+from traceback import format_exc
+from platform import python_version
+from hashlib import md5
+from time import strftime
+from time import localtime
+from uuid import uuid4
+
+from Logger import StringTable as ST
+from Logger.ToolError import OPTION_UNKNOWN_ERROR
+from Logger.ToolError import OPTION_VALUE_INVALID
+from Logger.ToolError import ABORT_ERROR
+from Logger.ToolError import UPT_REPKG_ERROR
+from Logger.ToolError import CODE_ERROR
+from Logger.ToolError import FatalError
+from Logger.ToolError import FILE_NOT_FOUND
+import Logger.Log as Logger
+
+from Xml.XmlParser import DistributionPackageXml
+from Xml.IniToXml import IniToXml
+
+from Library import GlobalData
+from Library.ParserValidate import IsValidPath
+
+from Core.DistributionPackageClass import DistributionPackageClass
+from Core.PackageFile import PackageFile
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## CheckForExistingDp
+#
+# Check whether a DP file with the same name already exists
+# @param Path: The path to be checked
+#
+def CheckForExistingDp(Path):
+ if os.path.exists(Path):
+ Logger.Info(ST.MSG_DISTRIBUTION_PACKAGE_FILE_EXISTS % Path)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input.upper() != "Y":
+ Logger.Error("\nMkPkg", ABORT_ERROR, ST.ERR_USER_ABORT, RaiseError=True)
+
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods per the command line options.
+# If no error is found, a zero value is returned so the caller of this tool
+# can know whether it executed successfully.
+#
+#
+def Main(Options = None):
+ if Options is None:
+ Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
+ try:
+ DataBase = GlobalData.gDB
+ ContentFileClosed = True
+ WorkspaceDir = GlobalData.gWORKSPACE
+
+ #
+ # Init PackFileToCreate
+ #
+ if not Options.PackFileToCreate:
+ Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
+
+ #
+ # Handle if the distribution package file already exists
+ #
+ CheckForExistingDp(Options.PackFileToCreate)
+
+ #
+ # Check package file existing and valid
+ #
+ CheckFileList('.DEC', Options.PackageFileList, ST.ERR_INVALID_PACKAGE_NAME, ST.ERR_INVALID_PACKAGE_PATH)
+ #
+ # Check module file existing and valid
+ #
+ CheckFileList('.INF', Options.ModuleFileList, ST.ERR_INVALID_MODULE_NAME, ST.ERR_INVALID_MODULE_PATH)
+
+ #
+ # Get list of files that installed with RePackage attribute available
+ #
+ RePkgDict = DataBase.GetRePkgDict()
+
+ ContentFile = PackageFile(GlobalData.gCONTENT_FILE, "w")
+ ContentFileClosed = False
+
+ #
+ # Add temp distribution header
+ #
+ if Options.PackageInformationDataFile:
+ XmlFile = IniToXml(Options.PackageInformationDataFile)
+ DistPkg = DistributionPackageXml().FromXml(XmlFile)
+ remove(XmlFile)
+
+ #
+ # Add distribution-level tool/misc files. Before packing, the current
+ # directory should be the workspace directory; otherwise the full
+ # path will be stored in the pack file.
+ #
+ Cwd = getcwd()
+ chdir(WorkspaceDir)
+ ToolObject = DistPkg.Tools
+ MiscObject = DistPkg.MiscellaneousFiles
+ FileList = []
+ if ToolObject:
+ FileList += ToolObject.GetFileList()
+ if MiscObject:
+ FileList += MiscObject.GetFileList()
+ for FileObject in FileList:
+ #
+ # If you have unicode file names, please convert them to byte
+ # strings in your desired encoding before passing them to
+ # write().
+ #
+ FromFile = os.path.normpath(FileObject.GetURI()).encode('utf_8')
+ FileFullPath = mws.join(WorkspaceDir, FromFile)
+ if FileFullPath in RePkgDict:
+ (DpGuid, DpVersion, DpName, Repackage) = RePkgDict[FileFullPath]
+ if not Repackage:
+ Logger.Error("\nMkPkg",
+ UPT_REPKG_ERROR,
+ ST.ERR_UPT_REPKG_ERROR,
+ ExtraData=ST.MSG_REPKG_CONFLICT %\
+ (FileFullPath, DpGuid, DpVersion, DpName)
+ )
+ else:
+ DistPkg.Header.RePackage = True
+ ContentFile.PackFile(FromFile)
+ chdir(Cwd)
+
+ #
+ # Add init dp information
+ #
+ else:
+ DistPkg = DistributionPackageClass()
+ DistPkg.Header.Name = 'Distribution Package'
+ DistPkg.Header.Guid = str(uuid4())
+ DistPkg.Header.Version = '1.0'
+
+ DistPkg.GetDistributionPackage(WorkspaceDir, Options.PackageFileList, \
+ Options.ModuleFileList)
+ FileList, MetaDataFileList = DistPkg.GetDistributionFileList()
+ for File in FileList + MetaDataFileList:
+ FileFullPath = os.path.normpath(os.path.join(WorkspaceDir, File))
+ #
+ # Check whether the file was included in a distribution that cannot
+ # be repackaged
+ #
+ if FileFullPath in RePkgDict:
+ (DpGuid, DpVersion, DpName, Repackage) = RePkgDict[FileFullPath]
+ if not Repackage:
+ Logger.Error("\nMkPkg",
+ UPT_REPKG_ERROR,
+ ST.ERR_UPT_REPKG_ERROR,
+ ExtraData = \
+ ST.MSG_REPKG_CONFLICT %(FileFullPath, DpName, \
+ DpGuid, DpVersion)
+ )
+ else:
+ DistPkg.Header.RePackage = True
+
+ Cwd = getcwd()
+ chdir(WorkspaceDir)
+ ContentFile.PackFiles(FileList)
+ chdir(Cwd)
+
+ Logger.Verbose(ST.MSG_COMPRESS_DISTRIBUTION_PKG)
+
+ ContentFile.Close()
+ ContentFileClosed = True
+
+ #
+ # Add Md5Signature
+ #
+ DistPkg.Header.Signature = md5(open(str(ContentFile), 'rb').read()).hexdigest()
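+ # Verification sketch (an illustrative assumption, not upstream code):
+ # an installer could recompute this digest the same way and compare it
+ # with DistPkg.Header.Signature to detect a corrupted content archive.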
+ #
+ # Add current Date
+ #
+ DistPkg.Header.Date = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))
+
+ #
+ # Finish final dp file
+ #
+ DistPkgFile = PackageFile(Options.PackFileToCreate, "w")
+ DistPkgFile.PackFile(str(ContentFile))
+ DistPkgXml = DistributionPackageXml()
+ DistPkgFile.PackData(DistPkgXml.ToXml(DistPkg), GlobalData.gDESC_FILE)
+ DistPkgFile.Close()
+ Logger.Quiet(ST.MSG_FINISH)
+ ReturnCode = 0
+
+ except FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % \
+ (python_version(), platform) + format_exc())
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % \
+ (python_version(), platform) + format_exc())
+ except OSError:
+ pass
+ except:
+ Logger.Error(
+ "\nMkPkg",
+ CODE_ERROR,
+ ST.ERR_UNKNOWN_FATAL_CREATING_ERR % \
+ Options.PackFileToCreate,
+ ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ Logger.Quiet(ST.MSG_PYTHON_ON % \
+ (python_version(), platform) + format_exc())
+ ReturnCode = CODE_ERROR
+ finally:
+ if os.path.exists(GlobalData.gCONTENT_FILE):
+ if not ContentFileClosed:
+ ContentFile.Close()
+ os.remove(GlobalData.gCONTENT_FILE)
+
+ return ReturnCode
+
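+# Illustrative invocation sketch (an assumption, not part of this file; the
+# attribute names below are the ones Main() reads, but the values are
+# hypothetical):
+#
+#   class _Options(object):
+#       PackFileToCreate = 'MyDistribution.dist'
+#       PackageInformationDataFile = ''
+#       PackageFileList = ['MyPkg/MyPkg.dec']
+#       ModuleFileList = []
+#   ReturnCode = Main(_Options())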
+
+## CheckFileList
+#
+# Check that each file in FileList has the required extension and is a valid
+# workspace-relative path.
+#
+# @param QualifiedExt: The required file extension, e.g. '.DEC' or '.INF'
+# @param FileList: The list of file paths to check
+# @param ErrorStringExt: The error message for an invalid name or extension
+# @param ErrorStringFullPath: The error message for a full (absolute) path
+#
+def CheckFileList(QualifiedExt, FileList, ErrorStringExt, ErrorStringFullPath):
+ if not FileList:
+ return
+ WorkspaceDir = GlobalData.gWORKSPACE
+ WorkspaceDir = os.path.normpath(WorkspaceDir)
+ for Item in FileList:
+ Ext = os.path.splitext(Item)[1]
+ if Ext.upper() != QualifiedExt.upper():
+ Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
+ ErrorStringExt % Item)
+
+ Item = os.path.normpath(Item)
+ Path = mws.join(WorkspaceDir, Item)
+ if not os.path.exists(Path):
+ Logger.Error("\nMkPkg", FILE_NOT_FOUND, ST.ERR_NOT_FOUND % Item)
+ elif Item == Path:
+ Logger.Error("\nMkPkg", OPTION_VALUE_INVALID,
+ ErrorStringFullPath % Item)
+ elif not IsValidPath(Item, WorkspaceDir):
+ Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
+ ErrorStringExt % Item)
+
+ if not os.path.split(Item)[0]:
+ Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
+ ST.ERR_INVALID_METAFILE_PATH % Item)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py
new file mode 100755
index 00000000..0cd33ef0
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/CommonObject.py
@@ -0,0 +1,953 @@
+## @file
+# This file is used to define common items of class object
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+Common Object
+'''
+from Library.DataType import TAB_LANGUAGE_EN_US
+
+## HelpTextObject
+#
+# @param object: Inherited from object class
+#
+class HelpTextObject(object):
+ def __init__(self):
+ self.HelpText = TextObject()
+
+ def SetHelpText(self, HelpText):
+ self.HelpText = HelpText
+
+ def GetHelpText(self):
+ return self.HelpText
+
+## HelpTextListObject
+#
+# @param object: Inherited from object class
+#
+class HelpTextListObject(object):
+ def __init__(self):
+ self.HelpTextList = []
+
+ def SetHelpTextList(self, HelpTextList):
+ self.HelpTextList = HelpTextList
+
+ def GetHelpTextList(self):
+ return self.HelpTextList
+
+## PromptListObject
+#
+# @param object: Inherited from object class
+#
+class PromptListObject(object):
+ def __init__(self):
+ self.PromptList = []
+
+ def SetPromptList(self, PromptList):
+ self.PromptList = PromptList
+
+ def GetPromptList(self):
+ return self.PromptList
+
+## CommonPropertiesObject
+#
+# This class defines common attributes used in Module/Platform/Package files
+#
+# @param object: Inherited from object class
+# @param Usage: Input value for Usage, default is []
+# @param FeatureFlag: Input value for FeatureFlag, default is ''
+# @param SupArchList: Input value for SupArchList, default is []
+# @param HelpText: Input value for HelpText, default is ''
+# @param HelpTextList: Input value for HelpTextList, default is []
+#
+class CommonPropertiesObject(HelpTextObject, HelpTextListObject):
+ def __init__(self):
+ self.Usage = []
+ self.FeatureFlag = ''
+ self.SupArchList = []
+ self.GuidValue = ''
+ HelpTextObject.__init__(self)
+ HelpTextListObject.__init__(self)
+
+ def SetUsage(self, Usage):
+ self.Usage = Usage
+
+ def GetUsage(self):
+ return self.Usage
+
+ def SetFeatureFlag(self, FeatureFlag):
+ self.FeatureFlag = FeatureFlag
+
+ def GetFeatureFlag(self):
+ return self.FeatureFlag
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetGuidValue(self, GuidValue):
+ self.GuidValue = GuidValue
+
+ def GetGuidValue(self):
+ return self.GuidValue
+
+## CommonHeaderObject
+#
+# This class defines common header items used in Module/Platform/Package files
+#
+# @param object: Inherited from object class
+#
+class CommonHeaderObject(object):
+ def __init__(self):
+ self.AbstractList = []
+ self.DescriptionList = []
+ self.CopyrightList = []
+ self.LicenseList = []
+
+ def SetAbstract(self, Abstract):
+ if isinstance(Abstract, list):
+ self.AbstractList = Abstract
+ else:
+ self.AbstractList.append(Abstract)
+
+ def GetAbstract(self):
+ return self.AbstractList
+
+ def SetDescription(self, Description):
+ if isinstance(Description, list):
+ self.DescriptionList = Description
+ else:
+ self.DescriptionList.append(Description)
+
+ def GetDescription(self):
+ return self.DescriptionList
+
+ def SetCopyright(self, Copyright):
+ if isinstance(Copyright, list):
+ self.CopyrightList = Copyright
+ else:
+ self.CopyrightList.append(Copyright)
+
+ def GetCopyright(self):
+ return self.CopyrightList
+
+ def SetLicense(self, License):
+ if isinstance(License, list):
+ self.LicenseList = License
+ else:
+ self.LicenseList.append(License)
+
+ def GetLicense(self):
+ return self.LicenseList
+
+## BinaryHeaderObject
+#
+# This class defines Binary header items used in Module/Platform/Package files
+#
+# @param object: Inherited from object class
+#
+class BinaryHeaderObject(object):
+ def __init__(self):
+ self.BinaryHeaderAbstractList = []
+ self.BinaryHeaderDescriptionList = []
+ self.BinaryHeaderCopyrightList = []
+ self.BinaryHeaderLicenseList = []
+
+ def SetBinaryHeaderAbstract(self, Abstract):
+ if isinstance(Abstract, list) and Abstract:
+ self.BinaryHeaderAbstractList = Abstract
+ elif isinstance(Abstract, tuple) and Abstract[1]:
+ self.BinaryHeaderAbstractList.append(Abstract)
+
+ def GetBinaryHeaderAbstract(self):
+ return self.BinaryHeaderAbstractList
+
+ def SetBinaryHeaderDescription(self, Description):
+ if isinstance(Description, list) and Description:
+ self.BinaryHeaderDescriptionList = Description
+ elif isinstance(Description, tuple) and Description[1]:
+ self.BinaryHeaderDescriptionList.append(Description)
+
+ def GetBinaryHeaderDescription(self):
+ return self.BinaryHeaderDescriptionList
+
+ def SetBinaryHeaderCopyright(self, Copyright):
+ if isinstance(Copyright, list) and Copyright:
+ self.BinaryHeaderCopyrightList = Copyright
+ elif isinstance(Copyright, tuple) and Copyright[1]:
+ self.BinaryHeaderCopyrightList.append(Copyright)
+
+ def GetBinaryHeaderCopyright(self):
+ return self.BinaryHeaderCopyrightList
+
+ def SetBinaryHeaderLicense(self, License):
+ if isinstance(License, list) and License:
+ self.BinaryHeaderLicenseList = License
+ elif isinstance(License, tuple) and License[1]:
+ self.BinaryHeaderLicenseList.append(License)
+
+ def GetBinaryHeaderLicense(self):
+ return self.BinaryHeaderLicenseList
+
+## ClonedRecordObject
+#
+# This class defines ClonedRecord items used in Module/Platform/Package files
+#
+# @param object: Inherited from object class
+#
+class ClonedRecordObject(object):
+ def __init__(self):
+ self.IdNum = 0
+ self.FarGuid = ''
+ self.PackageGuid = ''
+ self.PackageVersion = ''
+ self.ModuleGuid = ''
+ self.ModuleVersion = ''
+
+ def SetId(self, IdNo):
+ self.IdNum = IdNo
+
+ def GetId(self):
+ return self.IdNum
+
+ def SetFarGuid(self, FarGuid):
+ self.FarGuid = FarGuid
+
+ def GetFarGuid(self):
+ return self.FarGuid
+
+ def SetPackageGuid(self, PackageGuid):
+ self.PackageGuid = PackageGuid
+
+ def GetPackageGuid(self):
+ return self.PackageGuid
+
+ def SetPackageVersion(self, PackageVersion):
+ self.PackageVersion = PackageVersion
+
+ def GetPackageVersion(self):
+ return self.PackageVersion
+
+ def SetModuleGuid(self, ModuleGuid):
+ self.ModuleGuid = ModuleGuid
+
+ def GetModuleGuid(self):
+ return self.ModuleGuid
+
+ def SetModuleVersion(self, ModuleVersion):
+ self.ModuleVersion = ModuleVersion
+
+ def GetModuleVersion(self):
+ return self.ModuleVersion
+
+## TextObject
+#
+# This class defines the Text item used in PKG files
+#
+# @param object: Inherited from object class
+#
+class TextObject(object):
+ def __init__(self):
+ self.Lang = TAB_LANGUAGE_EN_US
+ self.String = ''
+
+ def SetLang(self, Lang):
+ self.Lang = Lang
+
+ def GetLang(self):
+ return self.Lang
+
+ def SetString(self, String):
+ self.String = String
+
+ def GetString(self):
+ return self.String
+
+## FileNameObject
+#
+# This class defines the File item used in modules, for binary files
+#
+# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
+#
+class FileNameObject(CommonPropertiesObject):
+ def __init__(self):
+ self.FileType = ''
+ self.Filename = ''
+ CommonPropertiesObject.__init__(self)
+
+ def SetFileType(self, FileType):
+ self.FileType = FileType
+
+ def GetFileType(self):
+ return self.FileType
+
+ def SetFilename(self, Filename):
+ self.Filename = Filename
+
+ def GetFilename(self):
+ return self.Filename
+
+## FileObject
+#
+# This class defines the File item used in PKG files
+#
+# @param object: Inherited from object class
+#
+class FileObject(object):
+ def __init__(self):
+ self.Executable = ''
+ self.Uri = ''
+ self.OsType = ''
+
+ def SetExecutable(self, Executable):
+ self.Executable = Executable
+
+ def GetExecutable(self):
+ return self.Executable
+
+ def SetURI(self, URI):
+ self.Uri = URI
+
+ def GetURI(self):
+ return self.Uri
+
+ def SetOS(self, OsType):
+ self.OsType = OsType
+
+ def GetOS(self):
+ return self.OsType
+
+##
+# MiscFileObject is used for XML
+#
+# @param CommonHeaderObject: Inherited from CommonHeaderObject class
+#
+class MiscFileObject(CommonHeaderObject):
+ def __init__(self):
+ self.Name = ''
+ self.FileList = []
+ CommonHeaderObject.__init__(self)
+
+ def SetName(self, Name):
+ self.Name = Name
+
+ def GetName(self):
+ return self.Name
+
+ def SetFileList(self, FileList):
+ self.FileList = FileList
+
+ def GetFileList(self):
+ return self.FileList
+
+##
+# ToolsObject
+#
+class ToolsObject(MiscFileObject):
+ pass
+
+## GuidVersionObject
+#
+# This class defines GUID/Version items used in PKG files
+#
+# @param object: Inherited from object class
+#
+class GuidVersionObject(object):
+ def __init__(self):
+ self.Guid = ''
+ self.Version = ''
+
+ def SetGuid(self, Guid):
+ self.Guid = Guid
+
+ def GetGuid(self):
+ return self.Guid
+
+ def SetVersion(self, Version):
+ self.Version = Version
+
+ def GetVersion(self):
+ return self.Version
+
+## IdentificationObject
+#
+# This class defines Identification items used in Module/Platform/Package files
+#
+# @param GuidVersionObject: Inherited from GuidVersionObject class
+#
+class IdentificationObject(GuidVersionObject):
+ def __init__(self):
+ self.Name = ''
+ self.BaseName = ''
+ self.FileName = ''
+ self.FullPath = ''
+ self.RelaPath = ''
+ self.PackagePath = ''
+ self.ModulePath = ''
+ self.CombinePath = ''
+ GuidVersionObject.__init__(self)
+
+ def SetName(self, Name):
+ self.Name = Name
+
+ def GetName(self):
+ return self.Name
+
+ def SetBaseName(self, BaseName):
+ self.BaseName = BaseName
+
+ def GetBaseName(self):
+ return self.BaseName
+
+ def SetFileName(self, FileName):
+ self.FileName = FileName
+
+ def GetFileName(self):
+ return self.FileName
+
+ def SetFullPath(self, FullPath):
+ self.FullPath = FullPath
+
+ def GetFullPath(self):
+ return self.FullPath
+
+ def SetRelaPath(self, RelaPath):
+ self.RelaPath = RelaPath
+
+ def GetRelaPath(self):
+ return self.RelaPath
+
+ def SetPackagePath(self, PackagePath):
+ self.PackagePath = PackagePath
+
+ def GetPackagePath(self):
+ return self.PackagePath
+
+ def SetModulePath(self, ModulePath):
+ self.ModulePath = ModulePath
+
+ def GetModulePath(self):
+ return self.ModulePath
+
+ def SetCombinePath(self, CombinePath):
+ self.CombinePath = CombinePath
+
+ def GetCombinePath(self):
+ return self.CombinePath
+
+## GuidProtocolPpiCommonObject
+#
+# This class defines Guid, Protocol and Ppi-like items used in
+# Module/Platform/Package files
+#
+# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
+#
+class GuidProtocolPpiCommonObject(CommonPropertiesObject):
+ def __init__(self):
+ self.Name = ''
+ self.CName = ''
+ self.Guid = ''
+ self.SupModuleList = []
+ CommonPropertiesObject.__init__(self)
+
+ def SetName(self, Name):
+ self.Name = Name
+
+ def GetName(self):
+ return self.Name
+
+ def SetCName(self, CName):
+ self.CName = CName
+
+ def GetCName(self):
+ return self.CName
+
+ def SetGuid(self, Guid):
+ self.Guid = Guid
+
+ def GetGuid(self):
+ return self.Guid
+
+ def SetSupModuleList(self, SupModuleList):
+ self.SupModuleList = SupModuleList
+
+ def GetSupModuleList(self):
+ return self.SupModuleList
+
+## GuidObject
+#
+# This class defines the Guid item used in Module/Platform/Package files
+#
+# @param GuidProtocolPpiCommonObject: Inherited from
+#                                     GuidProtocolPpiCommonObject
+#
+class GuidObject(GuidProtocolPpiCommonObject):
+ def __init__(self):
+ self.VariableName = ''
+ self.GuidTypeList = []
+ GuidProtocolPpiCommonObject.__init__(self)
+
+    def SetVariableName(self, VariableName):
+ self.VariableName = VariableName
+
+ def GetVariableName(self):
+ return self.VariableName
+
+ def SetGuidTypeList(self, GuidTypeList):
+ self.GuidTypeList = GuidTypeList
+
+ def GetGuidTypeList(self):
+ return self.GuidTypeList
+
+## ProtocolObject
+#
+# This class defines the Protocol item used in Module/Platform/Package files
+#
+# @param GuidProtocolPpiCommonObject: Inherited from
+# GuidProtocolPpiCommonObject
+#
+class ProtocolObject(GuidProtocolPpiCommonObject):
+ def __init__(self):
+ self.Notify = False
+ GuidProtocolPpiCommonObject.__init__(self)
+
+    def SetNotify(self, Notify):
+ self.Notify = Notify
+
+ def GetNotify(self):
+ return self.Notify
+
+## PpiObject
+#
+# This class defines the Ppi item used in Module/Platform/Package files
+#
+# @param GuidProtocolPpiCommonObject: Inherited from
+# GuidProtocolPpiCommonObject
+#
+class PpiObject(GuidProtocolPpiCommonObject):
+ def __init__(self):
+ self.Notify = False
+ GuidProtocolPpiCommonObject.__init__(self)
+
+    def SetNotify(self, Notify):
+ self.Notify = Notify
+
+ def GetNotify(self):
+ return self.Notify
+
+## DefineClass
+#
+# This class defines the DEFINE item used in Module/Platform/Package files
+#
+# @param object: Inherited from object class
+#
+class DefineClass(object):
+ def __init__(self):
+ self.Define = {}
+
+## UserExtensionObject
+#
+# @param object: Inherited from object class
+#
+class UserExtensionObject(object):
+ def __init__(self):
+ self.UserID = ''
+ self.Identifier = ''
+ self.BinaryAbstractList = []
+ self.BinaryDescriptionList = []
+ self.BinaryCopyrightList = []
+ self.BinaryLicenseList = []
+ self.UniLangDefsList = []
+ #
+ # { Statement : Arch , ... }
+ #
+ self.DefinesDict = {}
+ #
+ # { Arch : Statement , ... }
+ #
+ self.BuildOptionDict = {}
+ self.IncludesDict = {}
+ self.SourcesDict = {}
+ self.BinariesDict = {}
+ #
+ # UserExtension statement from meta-data file [UserExtension] section
+ #
+ self.Statement = ''
+ self.SupArchList = []
+
+ def SetStatement(self, Statement):
+ self.Statement = Statement
+
+ def GetStatement(self):
+ return self.Statement
+
+ def SetSupArchList(self, ArchList):
+ self.SupArchList = ArchList
+
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetUserID(self, UserID):
+ self.UserID = UserID
+
+ def GetUserID(self):
+ return self.UserID
+
+ def SetIdentifier(self, Identifier):
+ self.Identifier = Identifier
+
+ def GetIdentifier(self):
+ return self.Identifier
+
+ def SetUniLangDefsList(self, UniLangDefsList):
+ self.UniLangDefsList = UniLangDefsList
+
+ def GetUniLangDefsList(self):
+ return self.UniLangDefsList
+
+ def SetBinaryAbstract(self, BinaryAbstractList):
+ self.BinaryAbstractList = BinaryAbstractList
+
+ def GetBinaryAbstract(self, Lang=None):
+ if Lang:
+ for (Key, Value) in self.BinaryAbstractList:
+ if Key == Lang:
+ return Value
+ return None
+ else:
+ return self.BinaryAbstractList
+
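+    # Lookup sketch (illustrative only; values are hypothetical): the
+    # Binary* lists in this class hold (Lang, Value) pairs, so a
+    # language-specific query walks the list and falls back to None when the
+    # language is absent:
+    #
+    #   UserExt = UserExtensionObject()
+    #   UserExt.SetBinaryAbstract([('en-US', 'Sample abstract')])
+    #   UserExt.GetBinaryAbstract('en-US')  # -> 'Sample abstract'
+    #   UserExt.GetBinaryAbstract('fr-FR')  # -> None
+    #   UserExt.GetBinaryAbstract()         # -> [('en-US', 'Sample abstract')]
+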
+ def SetBinaryDescription(self, BinaryDescriptionList):
+ self.BinaryDescriptionList = BinaryDescriptionList
+
+ def GetBinaryDescription(self, Lang=None):
+ if Lang:
+ for (Key, Value) in self.BinaryDescriptionList:
+ if Key == Lang:
+ return Value
+ return None
+ else:
+ return self.BinaryDescriptionList
+
+ def SetBinaryCopyright(self, BinaryCopyrightList):
+ self.BinaryCopyrightList = BinaryCopyrightList
+
+ def GetBinaryCopyright(self, Lang=None):
+ if Lang:
+ for (Key, Value) in self.BinaryCopyrightList:
+ if Key == Lang:
+ return Value
+ return None
+ else:
+ return self.BinaryCopyrightList
+
+ def SetBinaryLicense(self, BinaryLicenseList):
+ self.BinaryLicenseList = BinaryLicenseList
+
+ def GetBinaryLicense(self, Lang=None):
+ if Lang:
+ for (Key, Value) in self.BinaryLicenseList:
+ if Key == Lang:
+ return Value
+ return None
+ else:
+ return self.BinaryLicenseList
+
+ def SetDefinesDict(self, DefinesDict):
+ self.DefinesDict = DefinesDict
+
+ def GetDefinesDict(self):
+ return self.DefinesDict
+
+ def SetBuildOptionDict(self, BuildOptionDict):
+ self.BuildOptionDict = BuildOptionDict
+
+ def GetBuildOptionDict(self):
+ return self.BuildOptionDict
+
+ def SetIncludesDict(self, IncludesDict):
+ self.IncludesDict = IncludesDict
+
+ def GetIncludesDict(self):
+ return self.IncludesDict
+
+ def SetSourcesDict(self, SourcesDict):
+ self.SourcesDict = SourcesDict
+
+ def GetSourcesDict(self):
+ return self.SourcesDict
+
+ def SetBinariesDict(self, BinariesDict):
+ self.BinariesDict = BinariesDict
+
+ def GetBinariesDict(self):
+ return self.BinariesDict
+
+## LibraryClassObject
+#
+# This class defines the Library item used in Module/Platform/Package files
+#
+# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
+#
+class LibraryClassObject(CommonPropertiesObject):
+ def __init__(self):
+ self.LibraryClass = ''
+ self.IncludeHeader = ''
+ self.SupModuleList = []
+ self.RecommendedInstance = GuidVersionObject()
+ CommonPropertiesObject.__init__(self)
+
+ def SetLibraryClass(self, LibraryClass):
+ self.LibraryClass = LibraryClass
+
+ def GetLibraryClass(self):
+ return self.LibraryClass
+
+ def SetSupModuleList(self, SupModuleList):
+ self.SupModuleList = SupModuleList
+
+ def GetSupModuleList(self):
+ return self.SupModuleList
+
+ def SetIncludeHeader(self, IncludeHeader):
+ self.IncludeHeader = IncludeHeader
+
+ def GetIncludeHeader(self):
+ return self.IncludeHeader
+
+ def SetRecommendedInstance(self, RecommendedInstance):
+ self.RecommendedInstance = RecommendedInstance
+
+ def GetRecommendedInstance(self):
+ return self.RecommendedInstance
+
+
+## PcdErrorObject
+#
+# @param object: Inherited from object class
+#
+class PcdErrorObject(object):
+ def __init__(self):
+ self.ValidValue = ''
+ self.ValidValueLang = ''
+ self.ValidValueRange = ''
+ self.Expression = ''
+ self.ErrorNumber = ''
+ self.ErrorMessageList = []
+ self.TokenSpaceGuidCName = ''
+ self.CName = ''
+ self.FileLine = ''
+ self.LineNum = 0
+
+ def SetValidValue(self, ValidValue):
+ self.ValidValue = ValidValue
+
+ def GetValidValue(self):
+ return self.ValidValue
+
+ def SetValidValueLang(self, ValidValueLang):
+ self.ValidValueLang = ValidValueLang
+
+ def GetValidValueLang(self):
+ return self.ValidValueLang
+
+ def SetValidValueRange(self, ValidValueRange):
+ self.ValidValueRange = ValidValueRange
+
+ def GetValidValueRange(self):
+ return self.ValidValueRange
+
+ def SetExpression(self, Expression):
+ self.Expression = Expression
+
+ def GetExpression(self):
+ return self.Expression
+
+ def SetErrorNumber(self, ErrorNumber):
+ self.ErrorNumber = ErrorNumber
+
+ def GetErrorNumber(self):
+ return self.ErrorNumber
+
+ def SetErrorMessageList(self, ErrorMessageList):
+ self.ErrorMessageList = ErrorMessageList
+
+ def GetErrorMessageList(self):
+ return self.ErrorMessageList
+
+ def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
+ self.TokenSpaceGuidCName = TokenSpaceGuidCName
+
+ def GetTokenSpaceGuidCName(self):
+ return self.TokenSpaceGuidCName
+
+ def SetCName(self, CName):
+ self.CName = CName
+
+ def GetCName(self):
+ return self.CName
+
+ def SetFileLine(self, FileLine):
+ self.FileLine = FileLine
+
+ def GetFileLine(self):
+ return self.FileLine
+
+ def SetLineNum(self, LineNum):
+ self.LineNum = LineNum
+
+ def GetLineNum(self):
+ return self.LineNum
+
+
+## IncludeObject
+#
+# This class defines the Include item used in Module/Platform/Package files
+#
+# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
+#
+class IncludeObject(CommonPropertiesObject):
+ def __init__(self):
+ self.FilePath = ''
+ self.ModuleType = ''
+ self.SupModuleList = []
+ self.Comment = ''
+ CommonPropertiesObject.__init__(self)
+
+ def SetFilePath(self, FilePath):
+ self.FilePath = FilePath
+
+ def GetFilePath(self):
+ return self.FilePath
+
+ def SetModuleType(self, ModuleType):
+ self.ModuleType = ModuleType
+
+ def GetModuleType(self):
+ return self.ModuleType
+
+ def SetSupModuleList(self, SupModuleList):
+ self.SupModuleList = SupModuleList
+
+ def GetSupModuleList(self):
+ return self.SupModuleList
+
+ def SetComment(self, Comment):
+ self.Comment = Comment
+
+ def GetComment(self):
+ return self.Comment
+
+## PcdObject
+#
+# This class defines the Pcd item used in Module/Platform/Package files
+#
+# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
+# @param HelpTextListObject: Inherited from HelpTextListObject class
+# @param PromptListObject: Inherited from PromptListObject class
+#
+class PcdObject(CommonPropertiesObject, HelpTextListObject, PromptListObject):
+ def __init__(self):
+ self.PcdCName = ''
+ self.CName = ''
+ self.Token = ''
+ self.TokenSpaceGuidCName = ''
+ self.TokenSpaceGuidValue = ''
+ self.DatumType = ''
+ self.MaxDatumSize = ''
+ self.DefaultValue = ''
+ self.Offset = ''
+ self.ValidUsage = ''
+ self.ItemType = ''
+ self.PcdErrorsList = []
+ self.SupModuleList = []
+ CommonPropertiesObject.__init__(self)
+ HelpTextListObject.__init__(self)
+ PromptListObject.__init__(self)
+
+ def SetPcdCName(self, PcdCName):
+ self.PcdCName = PcdCName
+
+ def GetPcdCName(self):
+ return self.PcdCName
+
+ def SetCName(self, CName):
+ self.CName = CName
+
+ def GetCName(self):
+ return self.CName
+
+    def SetToken(self, Token):
+        self.Token = Token
+
+    def GetToken(self):
+        return self.Token
+
+    def SetOffset(self, Offset):
+        self.Offset = Offset
+
+    def GetOffset(self):
+        return self.Offset
+
+ def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
+ self.TokenSpaceGuidCName = TokenSpaceGuidCName
+
+ def GetTokenSpaceGuidCName(self):
+ return self.TokenSpaceGuidCName
+
+ def SetTokenSpaceGuidValue(self, TokenSpaceGuidValue):
+ self.TokenSpaceGuidValue = TokenSpaceGuidValue
+
+ def GetTokenSpaceGuidValue(self):
+ return self.TokenSpaceGuidValue
+
+ def SetDatumType(self, DatumType):
+ self.DatumType = DatumType
+
+ def GetDatumType(self):
+ return self.DatumType
+
+ def SetMaxDatumSize(self, MaxDatumSize):
+ self.MaxDatumSize = MaxDatumSize
+
+ def GetMaxDatumSize(self):
+ return self.MaxDatumSize
+
+ def SetDefaultValue(self, DefaultValue):
+ self.DefaultValue = DefaultValue
+
+ def GetDefaultValue(self):
+ return self.DefaultValue
+
+ def SetValidUsage(self, ValidUsage):
+ self.ValidUsage = ValidUsage
+
+ def GetValidUsage(self):
+ return self.ValidUsage
+
+ def SetPcdErrorsList(self, PcdErrorsList):
+ self.PcdErrorsList = PcdErrorsList
+
+ def GetPcdErrorsList(self):
+ return self.PcdErrorsList
+
+ def SetItemType(self, ItemType):
+ self.ItemType = ItemType
+
+ def GetItemType(self):
+ return self.ItemType
+
+ def SetSupModuleList(self, SupModuleList):
+ self.SupModuleList = SupModuleList
+
+ def GetSupModuleList(self):
+ return self.SupModuleList
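+
+# Usage sketch (illustrative only; names are hypothetical): a PCD entry is
+# populated through the setters rather than constructor arguments:
+#
+#   Pcd = PcdObject()
+#   Pcd.SetCName('PcdSampleToken')
+#   Pcd.SetTokenSpaceGuidCName('gSampleTokenSpaceGuid')
+#   Pcd.SetDatumType('UINT32')
+#   Pcd.SetDefaultValue('0x0')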
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py
new file mode 100755
index 00000000..de2d29bc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py
@@ -0,0 +1,654 @@
+## @file
+# This file is used to define a class object to describe a module
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+ModuleObject
+'''
+
+##
+# Import Modules
+#
+from Object.POM.CommonObject import CommonPropertiesObject
+from Object.POM.CommonObject import IdentificationObject
+from Object.POM.CommonObject import CommonHeaderObject
+from Object.POM.CommonObject import BinaryHeaderObject
+from Object.POM.CommonObject import HelpTextListObject
+from Object.POM.CommonObject import GuidVersionObject
+
+
+##
+# BootModeObject
+#
+class BootModeObject(CommonPropertiesObject, HelpTextListObject):
+ def __init__(self):
+ self.SupportedBootModes = ''
+ CommonPropertiesObject.__init__(self)
+ HelpTextListObject.__init__(self)
+
+ def SetSupportedBootModes(self, SupportedBootModes):
+ self.SupportedBootModes = SupportedBootModes
+
+ def GetSupportedBootModes(self):
+ return self.SupportedBootModes
+
+##
+# EventObject
+#
+class EventObject(CommonPropertiesObject, HelpTextListObject):
+ def __init__(self):
+ self.EventType = ''
+ CommonPropertiesObject.__init__(self)
+ HelpTextListObject.__init__(self)
+
+ def SetEventType(self, EventType):
+ self.EventType = EventType
+
+ def GetEventType(self):
+ return self.EventType
+
+##
+# HobObject
+#
+class HobObject(CommonPropertiesObject, HelpTextListObject):
+ def __init__(self):
+ self.HobType = ''
+ CommonPropertiesObject.__init__(self)
+ HelpTextListObject.__init__(self)
+
+ def SetHobType(self, HobType):
+ self.HobType = HobType
+
+ def GetHobType(self):
+ return self.HobType
+
+##
+# SpecObject
+#
+class SpecObject(object):
+ def __init__(self):
+ self.Spec = ''
+ self.Version = ''
+
+ def SetSpec(self, Spec):
+ self.Spec = Spec
+
+ def GetSpec(self):
+ return self.Spec
+
+ def SetVersion(self, Version):
+ self.Version = Version
+
+ def GetVersion(self):
+ return self.Version
+
+## ModuleHeaderObject
+#
+# This class defines header items used in Module files
+#
+class ModuleHeaderObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject):
+ def __init__(self):
+ self.IsLibrary = False
+ self.IsLibraryModList = []
+ self.ModuleType = ''
+ self.BinaryModule = False
+ self.PcdIsDriver = ''
+ self.PiSpecificationVersion = ''
+ self.UefiSpecificationVersion = ''
+ self.UNIFlag = False
+ self.ModuleUniFile = ''
+ #
+ # SpecObject
+ #
+ self.SpecList = []
+ #
+ # BootModeObject
+ #
+ self.BootModeList = []
+ #
+ # EventObject
+ #
+ self.EventList = []
+ #
+ # HobObject
+ #
+ self.HobList = []
+ #
+ # LibraryClassObject
+ #
+ self.LibraryClassList = []
+ self.SupArchList = []
+ IdentificationObject.__init__(self)
+ CommonHeaderObject.__init__(self)
+ BinaryHeaderObject.__init__(self)
+
+ def SetIsLibrary(self, IsLibrary):
+ self.IsLibrary = IsLibrary
+
+ def GetIsLibrary(self):
+ return self.IsLibrary
+
+ def SetIsLibraryModList(self, IsLibraryModList):
+ self.IsLibraryModList = IsLibraryModList
+
+ def GetIsLibraryModList(self):
+ return self.IsLibraryModList
+
+ def SetModuleType(self, ModuleType):
+ self.ModuleType = ModuleType
+
+ def GetModuleType(self):
+ return self.ModuleType
+
+ def SetBinaryModule(self, BinaryModule):
+ self.BinaryModule = BinaryModule
+
+ def GetBinaryModule(self):
+ return self.BinaryModule
+
+ def SetPcdIsDriver(self, PcdIsDriver):
+ self.PcdIsDriver = PcdIsDriver
+
+ def GetPcdIsDriver(self):
+ return self.PcdIsDriver
+
+ def SetPiSpecificationVersion(self, PiSpecificationVersion):
+ self.PiSpecificationVersion = PiSpecificationVersion
+
+ def GetPiSpecificationVersion(self):
+ return self.PiSpecificationVersion
+
+ def SetUefiSpecificationVersion(self, UefiSpecificationVersion):
+ self.UefiSpecificationVersion = UefiSpecificationVersion
+
+ def GetUefiSpecificationVersion(self):
+ return self.UefiSpecificationVersion
+
+ def SetSpecList(self, SpecList):
+ self.SpecList = SpecList
+
+ def GetSpecList(self):
+ return self.SpecList
+
+ def SetBootModeList(self, BootModeList):
+ self.BootModeList = BootModeList
+
+ def GetBootModeList(self):
+ return self.BootModeList
+
+ def SetEventList(self, EventList):
+ self.EventList = EventList
+
+ def GetEventList(self):
+ return self.EventList
+
+ def SetHobList(self, HobList):
+ self.HobList = HobList
+
+ def GetHobList(self):
+ return self.HobList
+
+ def SetLibraryClassList(self, LibraryClassList):
+ self.LibraryClassList = LibraryClassList
+
+ def GetLibraryClassList(self):
+ return self.LibraryClassList
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetModuleUniFile(self, ModuleUniFile):
+ self.ModuleUniFile = ModuleUniFile
+
+ def GetModuleUniFile(self):
+ return self.ModuleUniFile
+
+##
+# SourceFileObject
+#
+class SourceFileObject(CommonPropertiesObject):
+ def __init__(self):
+ CommonPropertiesObject.__init__(self)
+ self.SourceFile = ''
+ self.TagName = ''
+ self.ToolCode = ''
+ self.Family = ''
+ self.FileType = ''
+
+ def SetSourceFile(self, SourceFile):
+ self.SourceFile = SourceFile
+
+ def GetSourceFile(self):
+ return self.SourceFile
+
+ def SetTagName(self, TagName):
+ self.TagName = TagName
+
+ def GetTagName(self):
+ return self.TagName
+
+ def SetToolCode(self, ToolCode):
+ self.ToolCode = ToolCode
+
+ def GetToolCode(self):
+ return self.ToolCode
+
+ def SetFamily(self, Family):
+ self.Family = Family
+
+ def GetFamily(self):
+ return self.Family
+
+ def SetFileType(self, FileType):
+ self.FileType = FileType
+
+ def GetFileType(self):
+ return self.FileType
+
+
+##
+# BinaryFileObject
+#
+class BinaryFileObject(CommonPropertiesObject):
+ def __init__(self):
+ self.FileNamList = []
+ self.AsBuiltList = []
+ CommonPropertiesObject.__init__(self)
+
+ def SetFileNameList(self, FileNamList):
+ self.FileNamList = FileNamList
+
+ def GetFileNameList(self):
+ return self.FileNamList
+
+ def SetAsBuiltList(self, AsBuiltList):
+ self.AsBuiltList = AsBuiltList
+
+ def GetAsBuiltList(self):
+ return self.AsBuiltList
+
+
+##
+# AsBuildLibraryClassObject
+#
+class AsBuildLibraryClassObject(object):
+ def __init__(self):
+ self.LibGuid = ''
+ self.LibVersion = ''
+ self.SupArchList = []
+
+ def SetLibGuid(self, LibGuid):
+ self.LibGuid = LibGuid
+ def GetLibGuid(self):
+ return self.LibGuid
+
+ def SetLibVersion(self, LibVersion):
+ self.LibVersion = LibVersion
+ def GetLibVersion(self):
+ return self.LibVersion
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+##
+# AsBuiltObject
+#
+class AsBuiltObject(object):
+ def __init__(self):
+ #
+ # list of PcdObject
+ #
+ self.PatchPcdList = []
+ #
+ # list of PcdObject
+ #
+ self.PcdExValueList = []
+ #
+ # list of GuidVersionObject
+ #
+ self.LibraryInstancesList = []
+ #
+ # List of BinaryBuildFlag object
+ #
+ self.BinaryBuildFlagList = []
+
+ def SetPatchPcdList(self, PatchPcdList):
+ self.PatchPcdList = PatchPcdList
+
+ def GetPatchPcdList(self):
+ return self.PatchPcdList
+
+ def SetPcdExList(self, PcdExValueList):
+ self.PcdExValueList = PcdExValueList
+
+ def GetPcdExList(self):
+ return self.PcdExValueList
+
+ def SetLibraryInstancesList(self, LibraryInstancesList):
+ self.LibraryInstancesList = LibraryInstancesList
+
+ def GetLibraryInstancesList(self):
+ return self.LibraryInstancesList
+
+ def SetBuildFlagsList(self, BinaryBuildFlagList):
+ self.BinaryBuildFlagList = BinaryBuildFlagList
+
+ def GetBuildFlagsList(self):
+ return self.BinaryBuildFlagList
+
+##
+# BinaryBuildFlag: this object holds those fields that are not covered by
+# the UPT Spec BinaryFile field
+#
+class BinaryBuildFlagObject(object):
+ def __init__(self):
+ self.Target = ''
+ self.TagName = ''
+ self.Family = ''
+ self.AsBuiltOptionFlags = ''
+
+ def SetTarget(self, Target):
+ self.Target = Target
+
+ def GetTarget(self):
+ return self.Target
+
+ def SetTagName(self, TagName):
+ self.TagName = TagName
+
+ def GetTagName(self):
+ return self.TagName
+
+ def SetFamily(self, Family):
+ self.Family = Family
+
+ def GetFamily(self):
+ return self.Family
+
+ def SetAsBuiltOptionFlags(self, AsBuiltOptionFlags):
+ self.AsBuiltOptionFlags = AsBuiltOptionFlags
+
+    def GetAsBuiltOptionFlags(self):
+ return self.AsBuiltOptionFlags
+
+##
+# ExternObject
+#
+class ExternObject(CommonPropertiesObject):
+ def __init__(self):
+ self.EntryPoint = ''
+ self.UnloadImage = ''
+ self.Constructor = ''
+ self.Destructor = ''
+ self.SupModList = []
+ CommonPropertiesObject.__init__(self)
+
+ def SetEntryPoint(self, EntryPoint):
+ self.EntryPoint = EntryPoint
+
+ def GetEntryPoint(self):
+ return self.EntryPoint
+
+ def SetUnloadImage(self, UnloadImage):
+ self.UnloadImage = UnloadImage
+
+ def GetUnloadImage(self):
+ return self.UnloadImage
+
+ def SetConstructor(self, Constructor):
+ self.Constructor = Constructor
+
+ def GetConstructor(self):
+ return self.Constructor
+
+ def SetDestructor(self, Destructor):
+ self.Destructor = Destructor
+
+ def GetDestructor(self):
+ return self.Destructor
+
+ def SetSupModList(self, SupModList):
+ self.SupModList = SupModList
+
+    def GetSupModList(self):
+ return self.SupModList
+
+##
+# DepexObject
+#
+class DepexObject(CommonPropertiesObject):
+ def __init__(self):
+ self.Depex = ''
+        self.ModuleType = ''
+ CommonPropertiesObject.__init__(self)
+
+ def SetDepex(self, Depex):
+ self.Depex = Depex
+
+ def GetDepex(self):
+ return self.Depex
+
+ def SetModuleType(self, ModuleType):
+        self.ModuleType = ModuleType
+
+ def GetModuleType(self):
+        return self.ModuleType
+
+##
+# PackageDependencyObject
+#
+class PackageDependencyObject(GuidVersionObject, CommonPropertiesObject):
+ def __init__(self):
+ self.Package = ''
+ self.PackageFilePath = ''
+ GuidVersionObject.__init__(self)
+ CommonPropertiesObject.__init__(self)
+
+ def SetPackageFilePath(self, PackageFilePath):
+ self.PackageFilePath = PackageFilePath
+
+ def GetPackageFilePath(self):
+ return self.PackageFilePath
+
+ def SetPackage(self, Package):
+ self.Package = Package
+
+ def GetPackage(self):
+ return self.Package
+
+##
+# BuildOptionObject
+#
+class BuildOptionObject(CommonPropertiesObject):
+ def __init__(self):
+ CommonPropertiesObject.__init__(self)
+ self.BuildOption = ''
+
+ def SetBuildOption(self, BuildOption):
+ self.BuildOption = BuildOption
+
+ def GetBuildOption(self):
+ return self.BuildOption
+
+##
+# ModuleObject
+#
+class ModuleObject(ModuleHeaderObject):
+ def __init__(self):
+ #
+ # {Arch : ModuleHeaderObject}
+ #
+ self.HeaderDict = {}
+ #
+ # LibraryClassObject
+ #
+ self.LibraryClassList = []
+ #
+ # SourceFileObject
+ #
+ self.SourceFileList = []
+ #
+ # BinaryFileObject
+ #
+ self.BinaryFileList = []
+ #
+ # PackageDependencyObject
+ #
+ self.PackageDependencyList = []
+ #
+ # DepexObject
+ #
+ self.PeiDepex = []
+ #
+ # DepexObject
+ #
+ self.DxeDepex = []
+ #
+ # DepexObject
+ #
+ self.SmmDepex = []
+ #
+ # ProtocolObject
+ #
+ self.ProtocolList = []
+ #
+ # PpiObject
+ #
+ self.PpiList = []
+ #
+ # GuidObject
+ #
+ self.GuidList = []
+ #
+ # PcdObject
+ #
+ self.PcdList = []
+ #
+ # ExternObject
+ #
+ self.ExternList = []
+ #
+ # BuildOptionObject
+ #
+ self.BuildOptionList = []
+ #
+ # UserExtensionObject
+ #
+ self.UserExtensionList = []
+ #
+ # MiscFileObject
+ #
+ self.MiscFileList = []
+ #
+ # ClonedFromObject
+ #
+ self.ClonedFrom = None
+
+ ModuleHeaderObject.__init__(self)
+
+ def SetHeaderDict(self, HeaderDict):
+ self.HeaderDict = HeaderDict
+
+ def GetHeaderDict(self):
+ return self.HeaderDict
+
+ def SetLibraryClassList(self, LibraryClassList):
+ self.LibraryClassList = LibraryClassList
+
+ def GetLibraryClassList(self):
+ return self.LibraryClassList
+
+ def SetSourceFileList(self, SourceFileList):
+ self.SourceFileList = SourceFileList
+
+ def GetSourceFileList(self):
+ return self.SourceFileList
+
+ def SetBinaryFileList(self, BinaryFileList):
+ self.BinaryFileList = BinaryFileList
+
+ def GetBinaryFileList(self):
+ return self.BinaryFileList
+
+ def SetPackageDependencyList(self, PackageDependencyList):
+ self.PackageDependencyList = PackageDependencyList
+
+ def GetPackageDependencyList(self):
+ return self.PackageDependencyList
+
+ def SetPeiDepex(self, PeiDepex):
+ self.PeiDepex = PeiDepex
+
+ def GetPeiDepex(self):
+ return self.PeiDepex
+
+ def SetDxeDepex(self, DxeDepex):
+ self.DxeDepex = DxeDepex
+
+ def GetDxeDepex(self):
+ return self.DxeDepex
+
+ def SetSmmDepex(self, SmmDepex):
+ self.SmmDepex = SmmDepex
+
+ def GetSmmDepex(self):
+ return self.SmmDepex
+
+ def SetPpiList(self, PpiList):
+ self.PpiList = PpiList
+
+ def GetPpiList(self):
+ return self.PpiList
+
+ def SetProtocolList(self, ProtocolList):
+ self.ProtocolList = ProtocolList
+
+ def GetProtocolList(self):
+ return self.ProtocolList
+
+ def SetPcdList(self, PcdList):
+ self.PcdList = PcdList
+
+ def GetPcdList(self):
+ return self.PcdList
+
+ def SetGuidList(self, GuidList):
+ self.GuidList = GuidList
+
+ def GetGuidList(self):
+ return self.GuidList
+
+ def SetExternList(self, ExternList):
+ self.ExternList = ExternList
+
+ def GetExternList(self):
+ return self.ExternList
+
+ def SetBuildOptionList(self, BuildOptionList):
+ self.BuildOptionList = BuildOptionList
+
+ def GetBuildOptionList(self):
+ return self.BuildOptionList
+
+ def SetUserExtensionList(self, UserExtensionList):
+ self.UserExtensionList = UserExtensionList
+
+ def GetUserExtensionList(self):
+ return self.UserExtensionList
+
+ def SetMiscFileList(self, MiscFileList):
+ self.MiscFileList = MiscFileList
+
+ def GetMiscFileList(self):
+ return self.MiscFileList
+
+ def SetClonedFrom(self, ClonedFrom):
+ self.ClonedFrom = ClonedFrom
+
+ def GetClonedFrom(self):
+ return self.ClonedFrom
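+
+# Usage sketch (illustrative only; values are hypothetical): the INF parser
+# assembles a ModuleObject through the setters above:
+#
+#   Module = ModuleObject()
+#   Module.SetModuleType('DXE_DRIVER')
+#   Module.SetPcdList([SamplePcdObject])
+#   Module.GetPcdList()                # -> [SamplePcdObject]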
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py
new file mode 100755
index 00000000..9d2db339
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/PackageObject.py
@@ -0,0 +1,192 @@
+## @file
+# This file is used to define a class object to describe a package
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+PackageObject
+'''
+
+##
+# Import Modules
+#
+from Object.POM.CommonObject import CommonPropertiesObject
+from Object.POM.CommonObject import IdentificationObject
+from Object.POM.CommonObject import CommonHeaderObject
+from Object.POM.CommonObject import BinaryHeaderObject
+from Library.Misc import Sdict
+
+## StandardIncludeFileObject
+#
+class StandardIncludeFileObject(CommonPropertiesObject):
+ def __init__(self):
+ CommonPropertiesObject.__init__(self)
+ self.IncludeFile = ''
+
+ def SetIncludeFile(self, IncludeFile):
+ self.IncludeFile = IncludeFile
+
+ def GetIncludeFile(self):
+ return self.IncludeFile
+
+## PackageIncludeFileObject
+#
+class PackageIncludeFileObject(StandardIncludeFileObject):
+ pass
+
+##
+# PackageObject
+#
+class PackageObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject):
+ def __init__(self):
+ IdentificationObject.__init__(self)
+ CommonHeaderObject.__init__(self)
+ BinaryHeaderObject.__init__(self)
+ #
+ # LibraryClassObject
+ #
+ self.LibraryClassList = []
+ #
+ # FileObject
+ #
+ self.IncludePathList = []
+ #
+ # StandardIncludeFileObject
+ #
+ self.StandardIncludeFileList = []
+ #
+ # PackageIncludeFileObject
+ #
+ self.PackageIncludeFileList = []
+ #
+        # Include and Arch list; each item is (IncludePath, SupArchList - list of Arch), used during package installation
+ #
+ self.IncludeArchList = []
+ #
+ # ProtocolObject
+ #
+ self.ProtocolList = []
+ #
+ # PpiObject
+ #
+ self.PpiList = []
+ #
+ # GuidObject
+ #
+ self.GuidList = []
+ #
+ # (PcdObject, PcdErrorObject)
+ #
+ self.PcdList = []
+ #
+        # {(PcdTokenSpaceGuidCName, PcdErrorNumber): PcdErrorMessageList}
+ #
+ self.PcdErrorCommentDict = {}
+ #
+ # UserExtensionObject
+ #
+ self.UserExtensionList = []
+ #
+ # MiscFileObject
+ #
+ self.MiscFileList = []
+ self.ModuleDict = Sdict()
+ #
+ # ClonedRecordObject
+ #
+ self.ClonedFromList = []
+ #
+ # string object
+ #
+ self.ModuleFileList = []
+
+ self.PcdChecks = []
+
+ self.UNIFlag = False
+
+ def SetLibraryClassList(self, LibraryClassList):
+ self.LibraryClassList = LibraryClassList
+
+ def GetLibraryClassList(self):
+ return self.LibraryClassList
+
+ def SetIncludePathList(self, IncludePathList):
+ self.IncludePathList = IncludePathList
+
+ def GetIncludePathList(self):
+ return self.IncludePathList
+
+ def SetIncludeArchList(self, IncludeArchList):
+ self.IncludeArchList = IncludeArchList
+
+ def GetIncludeArchList(self):
+ return self.IncludeArchList
+
+ def SetStandardIncludeFileList(self, StandardIncludeFileList):
+ self.StandardIncludeFileList = StandardIncludeFileList
+
+ def GetStandardIncludeFileList(self):
+ return self.StandardIncludeFileList
+
+ def SetPackageIncludeFileList(self, PackageIncludeFileList):
+ self.PackageIncludeFileList = PackageIncludeFileList
+
+ def GetPackageIncludeFileList(self):
+ return self.PackageIncludeFileList
+
+ def SetProtocolList(self, ProtocolList):
+ self.ProtocolList = ProtocolList
+
+ def GetProtocolList(self):
+ return self.ProtocolList
+
+ def SetPpiList(self, PpiList):
+ self.PpiList = PpiList
+
+ def GetPpiList(self):
+ return self.PpiList
+
+ def SetGuidList(self, GuidList):
+ self.GuidList = GuidList
+
+ def GetGuidList(self):
+ return self.GuidList
+
+ def SetPcdList(self, PcdList):
+ self.PcdList = PcdList
+
+ def GetPcdList(self):
+ return self.PcdList
+
+ def SetUserExtensionList(self, UserExtensionList):
+ self.UserExtensionList = UserExtensionList
+
+ def GetUserExtensionList(self):
+ return self.UserExtensionList
+
+ def SetMiscFileList(self, MiscFileList):
+ self.MiscFileList = MiscFileList
+
+ def GetMiscFileList(self):
+ return self.MiscFileList
+
+ def SetModuleDict(self, ModuleDict):
+ self.ModuleDict = ModuleDict
+
+ def GetModuleDict(self):
+ return self.ModuleDict
+
+ def SetClonedFromList(self, ClonedFromList):
+ self.ClonedFromList = ClonedFromList
+
+ def GetClonedFromList(self):
+ return self.ClonedFromList
+
+ def SetModuleFileList(self, ModuleFileList):
+ self.ModuleFileList = ModuleFileList
+
+ def GetModuleFileList(self):
+ return self.ModuleFileList
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/__init__.py
new file mode 100644
index 00000000..7f07f89f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/POM/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Object' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+POM
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
new file mode 100755
index 00000000..3e2919bb
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/DecObject.py
@@ -0,0 +1,605 @@
+## @file
+# This file is used to define class objects for the DEC file. It will be
+# consumed by DecParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+DecObject
+'''
+
+## Import modules
+#
+import os.path
+
+from Library.Misc import Sdict
+from Library.DataType import TAB_GUIDS
+from Library.DataType import TAB_PPIS
+from Library.DataType import TAB_PROTOCOLS
+from Library.DataType import TAB_DEC_DEFINES
+from Library.DataType import TAB_INCLUDES
+from Library.DataType import TAB_LIBRARY_CLASSES
+from Library.DataType import TAB_USER_EXTENSIONS
+from Library.DataType import TAB_PCDS
+from Library.DataType import TAB_ARCH_COMMON
+
+## _DecComments
+#
+# Base class for all data objects which have head and tail comments
+#
+class _DecComments:
+
+    ## Constructor
+ #
+ def __init__(self):
+ self._HeadComment = []
+ self._TailComment = []
+
+ ## GetComments
+ #
+ def GetComments(self):
+ return self._HeadComment, self._TailComment
+
+ ## GetHeadComment
+ #
+ def GetHeadComment(self):
+ return self._HeadComment
+
+ ## SetHeadComment
+ #
+ # @param Comment: comment content
+ #
+ def SetHeadComment(self, Comment):
+ self._HeadComment = Comment
+
+ ## GetTailComment
+ #
+ def GetTailComment(self):
+ return self._TailComment
+
+ ## SetTailComment
+ #
+ # @param Comment: comment content
+ #
+ def SetTailComment(self, Comment):
+ self._TailComment = Comment
+
+## _DecBaseObject
+#
+# Base class that hold common info
+#
+class _DecBaseObject(_DecComments):
+ def __init__(self, PkgFullName):
+ _DecComments.__init__(self)
+ #
+ # Key is combined with (Arch, SectionType)
+ # Default is common
+ #
+ self.ValueDict = Sdict()
+ self._PkgFullName = PkgFullName
+ self._PackagePath, self._FileName = os.path.split(PkgFullName)
+ self._SecName = ''
+
+ ## GetSectionName
+ #
+ def GetSectionName(self):
+ return self._SecName
+
+ ## GetPackagePath
+ #
+ def GetPackagePath(self):
+ return self._PackagePath
+
+ ## GetPackageFile
+ #
+ def GetPackageFile(self):
+ return self._FileName
+
+ ## GetPackageFullName
+ #
+ def GetPackageFullName(self):
+ return self._PkgFullName
+
+ ## AddItem
+    # Add a sub-item to the current object; sub-classes should override it
+    # if needed
+    #
+    # @param Item: Sub-item to be added
+    # @param Scope: A list storing section name and arch info
+ #
+ def AddItem(self, Item, Scope):
+ if not Scope:
+ return
+ if not Item:
+ return
+ ArchModule = []
+ for Ele in Scope:
+ if Ele[1] in self.ValueDict:
+ self.ValueDict[Ele[1]].append(Item)
+ else:
+ self.ValueDict[Ele[1]] = [Item]
+ ArchModule.append(Ele[1])
+ Item.ArchAndModuleType = ArchModule
+
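+    # Scope sketch (illustrative only; values are hypothetical): Scope is a
+    # list of (SectionName, Arch) pairs, and AddItem keys items by the arch
+    # element, so one item added under two arches is recorded once per arch
+    # and stamped with the arch list:
+    #
+    #   DecObj.AddItem(GuidItem, [('GUIDS', 'IA32'), ('GUIDS', 'X64')])
+    #   DecObj.ValueDict['IA32']       # -> [GuidItem]
+    #   GuidItem.ArchAndModuleType     # -> ['IA32', 'X64']
+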
+ ## _GetItemByArch
+    # Helper method used by sub-classes
+ # @param Arch: arch
+ #
+ def _GetItemByArch(self, Arch):
+ Arch = Arch.upper()
+ if Arch not in self.ValueDict:
+ return []
+ return self.ValueDict[Arch]
+
+ ## _GetAllItems
+    # Get all items across all arches; items in the returned list are unique
+ #
+ def _GetAllItems(self):
+ Retlst = []
+ for Arch in self.ValueDict:
+ for Item in self.ValueDict[Arch]:
+ if Item not in Retlst:
+ Retlst.append(Item)
+ return Retlst
+
+## _DecItemBaseObject
+#
+# Module type and arch the item belongs to
+#
+class _DecItemBaseObject(_DecComments):
+ def __init__(self):
+ _DecComments.__init__(self)
+ #
+ # Item's arch, if PCD, also include PCD type
+ #
+ self.ArchAndModuleType = []
+
+ ## GetArchList
+ #
+ def GetArchList(self):
+ ArchSet = set()
+ for Arch in self.ArchAndModuleType:
+ ArchSet.add(Arch)
+ return list(ArchSet)
+
+## DecDefineObject
+#
+# Class to hold define section information
+#
+class DecDefineObject(_DecBaseObject):
+ def __init__(self, PkgFullName):
+ _DecBaseObject.__init__(self, PkgFullName)
+ self._SecName = TAB_DEC_DEFINES.upper()
+ self._DecSpec = ''
+ self._PkgName = ''
+ self._PkgGuid = ''
+ self._PkgVersion = ''
+ self._PkgUniFile = ''
+
+ ## GetPackageSpecification
+ #
+ def GetPackageSpecification(self):
+ return self._DecSpec
+
+ def SetPackageSpecification(self, DecSpec):
+ self._DecSpec = DecSpec
+
+ ## GetPackageName
+ #
+ def GetPackageName(self):
+ return self._PkgName
+
+ def SetPackageName(self, PkgName):
+ self._PkgName = PkgName
+
+ ## GetPackageGuid
+ #
+ def GetPackageGuid(self):
+ return self._PkgGuid
+
+ def SetPackageGuid(self, PkgGuid):
+ self._PkgGuid = PkgGuid
+
+ ## GetPackageVersion
+ #
+ def GetPackageVersion(self):
+ return self._PkgVersion
+
+ def SetPackageVersion(self, PkgVersion):
+ self._PkgVersion = PkgVersion
+
+ ## GetPackageUniFile
+ #
+ def GetPackageUniFile(self):
+ return self._PkgUniFile
+
+ def SetPackageUniFile(self, PkgUniFile):
+ self._PkgUniFile = PkgUniFile
+
+ ## GetDefines
+ #
+ def GetDefines(self):
+ return self._GetItemByArch(TAB_ARCH_COMMON)
+
+ ## GetAllDefines
+ #
+ def GetAllDefines(self):
+ return self._GetAllItems()
+
+## DecDefineItemObject
+#
+# Each item of define section
+#
+class DecDefineItemObject(_DecItemBaseObject):
+ def __init__(self):
+ _DecItemBaseObject.__init__(self)
+ self.Key = ''
+ self.Value = ''
+
+ ## __hash__
+ #
+ def __hash__(self):
+ return hash(self.Key + self.Value)
+
+ ## __eq__
+ #
+ def __eq__(self, Other):
+ return id(self) == id(Other)
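+    # Note: equality here is identity-based while __hash__ above is
+    # content-based, so containment checks such as "Item not in Retlst" only
+    # filter out the very same object instance, never equal-valued duplicates.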
+
+ ## __str__
+ #
+ def __str__(self):
+ return str(self.ArchAndModuleType) + '\n' + self.Key + \
+ ' = ' + self.Value
+
+## DecIncludeObject
+#
+# Class to hold include section info
+#
+class DecIncludeObject(_DecBaseObject):
+ def __init__(self, PkgFullName):
+ _DecBaseObject.__init__(self, PkgFullName)
+ self._SecName = TAB_INCLUDES.upper()
+
+ ## GetIncludes
+ #
+ def GetIncludes(self, Arch=TAB_ARCH_COMMON):
+ return self._GetItemByArch(Arch)
+
+ ## GetAllIncludes
+ #
+ def GetAllIncludes(self):
+ return self._GetAllItems()
+
+## DecIncludeItemObject
+#
+# Item of include section
+#
+class DecIncludeItemObject(_DecItemBaseObject):
+ def __init__(self, File, Root):
+ self.File = File
+ self.Root = Root
+ _DecItemBaseObject.__init__(self)
+
+ ## __hash__
+ #
+ def __hash__(self):
+ return hash(self.File)
+
+ ## __eq__
+ #
+ def __eq__(self, Other):
+ return id(self) == id(Other)
+
+ ## __str__
+ #
+ def __str__(self):
+ return self.File
+
+## DecLibraryclassObject
+#
+# Class to hold library class section info
+#
+class DecLibraryclassObject(_DecBaseObject):
+ def __init__(self, PkgFullName):
+ _DecBaseObject.__init__(self, PkgFullName)
+ self._PackagePath, self._FileName = os.path.split(PkgFullName)
+ self._SecName = TAB_LIBRARY_CLASSES.upper()
+
+ ## GetLibraryclasses
+ #
+ def GetLibraryclasses(self, Arch=TAB_ARCH_COMMON):
+ return self._GetItemByArch(Arch)
+
+ ## GetAllLibraryclasses
+ #
+ def GetAllLibraryclasses(self):
+ return self._GetAllItems()
+
+## DecLibraryclassItemObject
+# Item of library class section
+#
+class DecLibraryclassItemObject(_DecItemBaseObject):
+ def __init__(self, Libraryclass, File, Root):
+ _DecItemBaseObject.__init__(self)
+ self.File = File
+ self.Root = Root
+ self.Libraryclass = Libraryclass
+
+ ## __hash__
+ #
+ def __hash__(self):
+ return hash(self.Libraryclass + self.File)
+
+ ## __eq__
+ #
+ def __eq__(self, Other):
+ return id(self) == id(Other)
+
+ ## __str__
+ #
+ def __str__(self):
+ return self.Libraryclass + '|' + self.File
+
+## DecPcdObject
+# Class to hold PCD section
+#
+class DecPcdObject(_DecBaseObject):
+ def __init__(self, PkgFullName):
+ _DecBaseObject.__init__(self, PkgFullName)
+ self._SecName = TAB_PCDS.upper()
+
+ ## AddItem
+ #
+ # Diff from base class
+ #
+ # @param Item: Item
+ # @param Scope: Scope
+ #
+ def AddItem(self, Item, Scope):
+ if not Scope:
+ return
+ if not Item:
+ return
+ ArchModule = []
+ for Type, Arch in Scope:
+ if (Type, Arch) in self.ValueDict:
+ self.ValueDict[Type, Arch].append(Item)
+ else:
+ self.ValueDict[Type, Arch] = [Item]
+ ArchModule.append([Type, Arch])
+ Item.ArchAndModuleType = ArchModule
+
+ ## GetPcds
+ #
+ # @param PcdType: PcdType
+ # @param Arch: Arch
+ #
+ def GetPcds(self, PcdType, Arch=TAB_ARCH_COMMON):
+ PcdType = PcdType.upper()
+ Arch = Arch.upper()
+ if (PcdType, Arch) not in self.ValueDict:
+ return []
+ return self.ValueDict[PcdType, Arch]
+
+ ## GetPcdsByType
+ #
+ # @param PcdType: PcdType
+ #
+ def GetPcdsByType(self, PcdType):
+ PcdType = PcdType.upper()
+ Retlst = []
+ for TypeInDict, Arch in self.ValueDict:
+ if TypeInDict != PcdType:
+ continue
+ for Item in self.ValueDict[PcdType, Arch]:
+ if Item not in Retlst:
+ Retlst.append(Item)
+ return Retlst
+
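+# Keying sketch for DecPcdObject (illustrative only; values are
+# hypothetical): PCD items are stored under (PcdType, Arch) tuples rather
+# than plain arch strings, so both parts must match on retrieval:
+#
+#   DecPcds.ValueDict[('PCDSFIXEDATBUILD', 'COMMON')] = [PcdItem]
+#   DecPcds.GetPcds('PcdsFixedAtBuild')        # -> [PcdItem]
+#   DecPcds.GetPcdsByType('PcdsFixedAtBuild')  # -> [PcdItem]
+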
+## DecPcdItemObject
+#
+# Item of PCD section
+#
+# @param _DecItemBaseObject: _DecItemBaseObject object
+#
+class DecPcdItemObject(_DecItemBaseObject):
+ def __init__(self, Guid, Name, Value, DatumType,
+ Token, MaxDatumSize=''):
+ _DecItemBaseObject.__init__(self)
+ self.TokenCName = Name
+ self.TokenSpaceGuidCName = Guid
+ self.DatumType = DatumType
+ self.DefaultValue = Value
+ self.TokenValue = Token
+ self.MaxDatumSize = MaxDatumSize
+
+ ## __hash__
+ #
+ def __hash__(self):
+ return hash(self.TokenSpaceGuidCName + self.TokenCName)
+
+ ## __eq__
+ #
+ def __eq__(self, Other):
+ return id(self) == id(Other)
+
+ ## GetArchListOfType
+ #
+ # @param PcdType: PcdType
+ #
+ def GetArchListOfType(self, PcdType):
+ ItemSet = set()
+ PcdType = PcdType.upper()
+ for Type, Arch in self.ArchAndModuleType:
+ if Type != PcdType:
+ continue
+ ItemSet.add(Arch)
+ return list(ItemSet)
+
+## DecGuidObjectBase
+#
+# Base class for PPI, Protocol, and GUID.
+# Holds the same data but provides different methods for clarity in sub-classes
+#
+# @param _DecBaseObject: Dec Base Object
+#
+class DecGuidObjectBase(_DecBaseObject):
+ def __init__(self, PkgFullName):
+ _DecBaseObject.__init__(self, PkgFullName)
+
+ ## GetGuidStyleItems
+ #
+ # @param Arch: Arch
+ #
+ def GetGuidStyleItems(self, Arch=TAB_ARCH_COMMON):
+ return self._GetItemByArch(Arch)
+
+ ## GetGuidStyleAllItems
+ #
+ def GetGuidStyleAllItems(self):
+ return self._GetAllItems()
+
+## DecGuidItemObject
+#
+# Item of GUID, PPI and Protocol section
+#
+# @param _DecItemBaseObject: Dec Item Base Object
+#
+class DecGuidItemObject(_DecItemBaseObject):
+ def __init__(self, CName, GuidCValue, GuidString):
+ _DecItemBaseObject.__init__(self)
+ self.GuidCName = CName
+ self.GuidCValue = GuidCValue
+ self.GuidString = GuidString
+
+ ## __hash__
+ #
+ def __hash__(self):
+ return hash(self.GuidCName)
+
+ ## __eq__
+ #
+ def __eq__(self, Other):
+ return id(self) == id(Other)
+
+ ## __str__
+ #
+ def __str__(self):
+ return self.GuidCName + ' = ' + self.GuidCValue
+
+## DecGuidObject
+#
+# Class for GUID section
+#
+# @param DecGuidObjectBase: Dec Guid Object Base
+#
+class DecGuidObject(DecGuidObjectBase):
+ def __init__(self, PkgFullName):
+ DecGuidObjectBase.__init__(self, PkgFullName)
+ self._SecName = TAB_GUIDS.upper()
+
+ ## GetGuids
+ #
+ # @param Arch: Arch
+ #
+ def GetGuids(self, Arch=TAB_ARCH_COMMON):
+ return self._GetItemByArch(Arch)
+
+ ## GetAllGuids
+ #
+ def GetAllGuids(self):
+ return self._GetAllItems()
+
+## DecPpiObject
+#
+# Class for PPI section
+#
+# @param DecGuidObjectBase: Dec Guid Object Base
+#
+class DecPpiObject(DecGuidObjectBase):
+ def __init__(self, PkgFullName):
+ DecGuidObjectBase.__init__(self, PkgFullName)
+ self._SecName = TAB_PPIS.upper()
+
+ ## GetPpis
+ #
+ # @param Arch: Arch
+ #
+ def GetPpis(self, Arch=TAB_ARCH_COMMON):
+ return self._GetItemByArch(Arch)
+
+ ## GetAllPpis
+ #
+ def GetAllPpis(self):
+ return self._GetAllItems()
+
+## DecProtocolObject
+#
+# Class for protocol section
+#
+# @param DecGuidObjectBase: Dec Guid Object Base
+#
+class DecProtocolObject(DecGuidObjectBase):
+ def __init__(self, PkgFullName):
+ DecGuidObjectBase.__init__(self, PkgFullName)
+ self._SecName = TAB_PROTOCOLS.upper()
+
+ ## GetProtocols
+ #
+ # @param Arch: Arch
+ #
+ def GetProtocols(self, Arch=TAB_ARCH_COMMON):
+ return self._GetItemByArch(Arch)
+
+ ## GetAllProtocols
+ #
+ def GetAllProtocols(self):
+ return self._GetAllItems()
+
+## DecUserExtensionObject
+#
+# Class for user extension section
+#
+# @param _DecBaseObject: Dec Base Object
+#
+class DecUserExtensionObject(_DecBaseObject):
+ def __init__(self, PkgFullName):
+ _DecBaseObject.__init__(self, PkgFullName)
+ self._SecName = TAB_USER_EXTENSIONS.upper()
+ self.ItemList = []
+
+    ## AddItem
+ #
+ # @param Item: Item
+ # @param Scope: Scope
+ #
+ def AddItem(self, Item, Scope):
+ if not Scope:
+ pass
+ if not Item:
+ return
+ self.ItemList.append(Item)
+
+ ## GetAllUserExtensions
+ #
+ def GetAllUserExtensions(self):
+ return self.ItemList
+
+
+## DecUserExtensionItemObject
+# Item for user extension section
+#
+# @param _DecItemBaseObject: Dec Item Base Object
+#
+class DecUserExtensionItemObject(_DecItemBaseObject):
+ def __init__(self):
+ _DecItemBaseObject.__init__(self)
+ self.UserString = ''
+ self.UserId = ''
+ self.IdString = ''
+
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
new file mode 100755
index 00000000..70cbc54c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
@@ -0,0 +1,686 @@
+## @file
+# This file is used to define class objects of INF file [Binaries] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfBinaryObject
+'''
+
+import os
+
+from copy import deepcopy
+from Library import DataType as DT
+from Library import GlobalData
+import Logger.Log as Logger
+from Logger import ToolError
+from Logger import StringTable as ST
+from Library.Misc import Sdict
+
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+from Object.Parser.InfCommonObject import CurrentLine
+from Library.Misc import ConvPathFromAbsToRel
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.Misc import ValidFile
+from Library.ParserValidate import IsValidPath
+
+
+class InfBianryItem():
+ def __init__(self):
+ self.FileName = ''
+ self.Target = ''
+ self.FeatureFlagExp = ''
+ self.HelpString = ''
+ self.Type = ''
+ self.SupArchList = []
+
+ def SetFileName(self, FileName):
+ self.FileName = FileName
+ def GetFileName(self):
+ return self.FileName
+
+ def SetTarget(self, Target):
+ self.Target = Target
+ def GetTarget(self):
+ return self.Target
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+ def SetType(self, Type):
+ self.Type = Type
+ def GetType(self):
+ return self.Type
+
+    def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+class InfBianryVerItem(InfBianryItem, CurrentLine):
+ def __init__(self):
+ InfBianryItem.__init__(self)
+ CurrentLine.__init__(self)
+ self.VerTypeName = ''
+
+ def SetVerTypeName(self, VerTypeName):
+ self.VerTypeName = VerTypeName
+ def GetVerTypeName(self):
+ return self.VerTypeName
+
+class InfBianryUiItem(InfBianryItem, CurrentLine):
+ def __init__(self):
+ InfBianryItem.__init__(self)
+ CurrentLine.__init__(self)
+ self.UiTypeName = ''
+
+ def SetUiTypeName(self, UiTypeName):
+ self.UiTypeName = UiTypeName
+    def GetUiTypeName(self):
+ return self.UiTypeName
+
+class InfBianryCommonItem(InfBianryItem, CurrentLine):
+ def __init__(self):
+ self.CommonType = ''
+ self.TagName = ''
+ self.Family = ''
+ self.GuidValue = ''
+ InfBianryItem.__init__(self)
+ CurrentLine.__init__(self)
+
+ def SetCommonType(self, CommonType):
+ self.CommonType = CommonType
+ def GetCommonType(self):
+ return self.CommonType
+
+ def SetTagName(self, TagName):
+ self.TagName = TagName
+ def GetTagName(self):
+ return self.TagName
+
+ def SetFamily(self, Family):
+ self.Family = Family
+ def GetFamily(self):
+ return self.Family
+
+ def SetGuidValue(self, GuidValue):
+ self.GuidValue = GuidValue
+ def GetGuidValue(self):
+ return self.GuidValue
+
+## InfBinariesObject
+#
+# Class for the INF [Binaries] section
+#
+class InfBinariesObject(InfSectionCommonDef):
+ def __init__(self):
+ self.Binaries = Sdict()
+ #
+ # Macro defined in this section should be only used in this section.
+ #
+ self.Macros = {}
+ InfSectionCommonDef.__init__(self)
+
+ ## CheckVer
+ #
+ #
+ def CheckVer(self, Ver, __SupArchList):
+ #
+ # Check Ver
+ #
+ for VerItem in Ver:
+ IsValidFileFlag = False
+ VerContent = VerItem[0]
+ VerComment = VerItem[1]
+ VerCurrentLine = VerItem[2]
+ GlobalData.gINF_CURRENT_LINE = VerCurrentLine
+ InfBianryVerItemObj = None
+ #
+            # Should contain at least 2 elements
+ #
+ if len(VerContent) < 2:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (VerContent[0], 2),
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+ return False
+ if len(VerContent) > 4:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX % (VerContent[0], 4),
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+ return False
+ if len(VerContent) >= 2:
+ #
+ # Create a Ver Object.
+ #
+ InfBianryVerItemObj = InfBianryVerItem()
+
+ if VerContent[0] != DT.BINARY_FILE_TYPE_VER:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_VER_TYPE % DT.BINARY_FILE_TYPE_VER,
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+
+ InfBianryVerItemObj.SetVerTypeName(VerContent[0])
+ InfBianryVerItemObj.SetType(VerContent[0])
+ #
+ # Verify File exist or not
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR,
+ VerContent[1])))
+ if not (ValidFile(FullFileName) or ValidFile(VerContent[1])):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST % (VerContent[1]),
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+ #
+ # Validate file exist/format.
+ #
+ if IsValidPath(VerContent[1], GlobalData.gINF_MODULE_DIR):
+ IsValidFileFlag = True
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (VerContent[1]),
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+ return False
+ if IsValidFileFlag:
+                VerContent[1] = ConvPathFromAbsToRel(VerContent[1],
+ GlobalData.gINF_MODULE_DIR)
+ InfBianryVerItemObj.SetFileName(VerContent[1])
+ if len(VerContent) >= 3:
+ #
+ # Add Target information
+ #
+ InfBianryVerItemObj.SetTarget(VerContent[2])
+ if len(VerContent) == 4:
+ if VerContent[3].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+ #
+ # Validate Feature Flag Express
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(VerContent[3].\
+ strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
+ File=VerCurrentLine.GetFileName(),
+ Line=VerCurrentLine.GetLineNo(),
+ ExtraData=VerCurrentLine.GetLineString())
+ InfBianryVerItemObj.SetFeatureFlagExp(VerContent[3])
+
+ InfBianryVerItemObj.SetSupArchList(__SupArchList)
+
+ #
+ # Determine binary file name duplicate. Follow below rule:
+ #
+ # A binary filename must not be duplicated within
+ # a [Binaries] section. A binary filename may appear in
+ # multiple architectural [Binaries] sections. A binary
+ # filename listed in an architectural [Binaries] section
+ # must not be listed in the common architectural
+ # [Binaries] section.
+ #
+ # NOTE: This check will not report error now.
+ #
+ for Item in self.Binaries:
+ if Item.GetFileName() == InfBianryVerItemObj.GetFileName():
+ ItemSupArchList = Item.GetSupArchList()
+ for ItemArch in ItemSupArchList:
+ for VerItemObjArch in __SupArchList:
+ if ItemArch == VerItemObjArch:
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+ #
+ pass
+ if ItemArch.upper() == 'COMMON' or VerItemObjArch.upper() == 'COMMON':
+ #
+ # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+ #
+ pass
+
+ if InfBianryVerItemObj is not None:
+            if InfBianryVerItemObj in self.Binaries:
+ BinariesList = self.Binaries[InfBianryVerItemObj]
+ BinariesList.append((InfBianryVerItemObj, VerComment))
+ self.Binaries[InfBianryVerItemObj] = BinariesList
+ else:
+ BinariesList = []
+ BinariesList.append((InfBianryVerItemObj, VerComment))
+ self.Binaries[InfBianryVerItemObj] = BinariesList
+
+ ## ParseCommonBinary
+ #
+ # ParseCommonBinary
+ #
+ def ParseCommonBinary(self, CommonBinary, __SupArchList):
+ #
+ # Check common binary definitions
+ # Type | FileName | Target | Family | TagName | FeatureFlagExp
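+        # e.g. a hypothetical INF entry:
+        #   BIN|SampleBinary.bin|DEBUG|MSFT|SampleTag|TRUE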
+ #
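+        # e.g. (illustrative, hypothetical file names) the entry
+        #     BIN|Bin/MyDriver.bin|UEFI|MSFT||TRUE
+        # arrives here split into
+        #     ['BIN', 'Bin/MyDriver.bin', 'UEFI', 'MSFT', '', 'TRUE']
+        #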
+ for Item in CommonBinary:
+ IsValidFileFlag = False
+ ItemContent = Item[0]
+ ItemComment = Item[1]
+ CurrentLineOfItem = Item[2]
+ GlobalData.gINF_CURRENT_LINE = CurrentLineOfItem
+ InfBianryCommonItemObj = None
+ if ItemContent[0] == 'SUBTYPE_GUID':
+ if len(ItemContent) < 3:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (ItemContent[0], 3),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ return False
+ else:
+ if len(ItemContent) < 2:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (ItemContent[0], 2),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ return False
+
+ if len(ItemContent) > 7:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX % (ItemContent[0], 7),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ return False
+ if len(ItemContent) >= 2:
+ #
+ # Create a Common Object.
+ #
+ InfBianryCommonItemObj = InfBianryCommonItem()
+ #
+ # Convert Binary type.
+ #
+ BinaryFileType = ItemContent[0].strip()
+ if BinaryFileType == 'RAW' or BinaryFileType == 'ACPI' or BinaryFileType == 'ASL':
+ BinaryFileType = 'BIN'
+
+ if BinaryFileType not in DT.BINARY_FILE_TYPE_LIST:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_INVALID_FILETYPE % \
+ (DT.BINARY_FILE_TYPE_LIST.__str__()),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+
+ if BinaryFileType == 'SUBTYPE_GUID':
+ BinaryFileType = 'FREEFORM'
+
+ if BinaryFileType == 'LIB' or BinaryFileType == 'UEFI_APP':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_INVALID_FILETYPE % \
+ (DT.BINARY_FILE_TYPE_LIST.__str__()),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+
+ InfBianryCommonItemObj.SetType(BinaryFileType)
+ InfBianryCommonItemObj.SetCommonType(ItemContent[0])
+ FileName = ''
+ if BinaryFileType == 'FREEFORM':
+ InfBianryCommonItemObj.SetGuidValue(ItemContent[1])
+ if len(ItemContent) >= 3:
+ FileName = ItemContent[2]
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FILENAME_NOT_EXIST,
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ else:
+ FileName = ItemContent[1]
+ #
+            # Verify whether the file exists
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR,
+ FileName)))
+ if not (ValidFile(FullFileName) or ValidFile(FileName)):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST % (FileName),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ #
+            # Validate file existence and path format.
+ #
+ if IsValidPath(FileName, GlobalData.gINF_MODULE_DIR):
+ IsValidFileFlag = True
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (FileName),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ return False
+ if IsValidFileFlag:
+                FileName = ConvPathFromAbsToRel(FileName, GlobalData.gINF_MODULE_DIR)
+ InfBianryCommonItemObj.SetFileName(FileName)
+ if len(ItemContent) >= 3:
+ #
+ # Add Target information
+ #
+ if BinaryFileType != 'FREEFORM':
+ InfBianryCommonItemObj.SetTarget(ItemContent[2])
+
+ if len(ItemContent) >= 4:
+ #
+ # Add Family information
+ #
+ if BinaryFileType != 'FREEFORM':
+ InfBianryCommonItemObj.SetFamily(ItemContent[3])
+ else:
+ InfBianryCommonItemObj.SetTarget(ItemContent[3])
+
+ if len(ItemContent) >= 5:
+ #
+ # TagName entries are build system specific. If there
+ # is content in the entry, the tool must exit
+ # gracefully with an error message that indicates build
+ # system specific content cannot be distributed using
+ # the UDP
+ #
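+                    # e.g. (illustrative) 'BIN|Foo.bin|UEFI|MSFT|MYTOOLS|TRUE'
+                    # would be rejected here because of the 'MYTOOLS' TagName.
+                    #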
+ if BinaryFileType != 'FREEFORM':
+ if ItemContent[4].strip() != '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED % (ItemContent[4]),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ else:
+ InfBianryCommonItemObj.SetFamily(ItemContent[4])
+
+ if len(ItemContent) >= 6:
+ #
+ # Add FeatureFlagExp
+ #
+ if BinaryFileType != 'FREEFORM':
+ if ItemContent[5].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ #
+ # Validate Feature Flag Express
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(ItemContent[5].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ InfBianryCommonItemObj.SetFeatureFlagExp(ItemContent[5])
+ else:
+ if ItemContent[5].strip() != '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED % (ItemContent[5]),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+
+ if len(ItemContent) == 7:
+ if ItemContent[6].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ #
+ # Validate Feature Flag Express
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(ItemContent[6].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
+ File=CurrentLineOfItem.GetFileName(),
+ Line=CurrentLineOfItem.GetLineNo(),
+ ExtraData=CurrentLineOfItem.GetLineString())
+ InfBianryCommonItemObj.SetFeatureFlagExp(ItemContent[6])
+
+ InfBianryCommonItemObj.SetSupArchList(__SupArchList)
+
+ #
+        # Check for duplicate binary file names, following the rule below:
+ #
+ # A binary filename must not be duplicated within
+ # a [Binaries] section. A binary filename may appear in
+ # multiple architectural [Binaries] sections. A binary
+ # filename listed in an architectural [Binaries] section
+ # must not be listed in the common architectural
+ # [Binaries] section.
+ #
+        # NOTE: This check does not report an error for now.
+ #
+# for Item in self.Binaries:
+# if Item.GetFileName() == InfBianryCommonItemObj.GetFileName():
+# ItemSupArchList = Item.GetSupArchList()
+# for ItemArch in ItemSupArchList:
+# for ComItemObjArch in __SupArchList:
+# if ItemArch == ComItemObjArch:
+# #
+# # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+# #
+# pass
+#
+# if ItemArch.upper() == 'COMMON' or ComItemObjArch.upper() == 'COMMON':
+# #
+# # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+# #
+# pass
+
+ if InfBianryCommonItemObj is not None:
+                if InfBianryCommonItemObj in self.Binaries:
+ BinariesList = self.Binaries[InfBianryCommonItemObj]
+ BinariesList.append((InfBianryCommonItemObj, ItemComment))
+ self.Binaries[InfBianryCommonItemObj] = BinariesList
+ else:
+ BinariesList = []
+ BinariesList.append((InfBianryCommonItemObj, ItemComment))
+ self.Binaries[InfBianryCommonItemObj] = BinariesList
+
+ def SetBinary(self, UiInf=None, Ver=None, CommonBinary=None, ArchList=None):
+
+ __SupArchList = []
+ for ArchItem in ArchList:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+ __SupArchList.append(ArchItem)
+
+ if UiInf is not None:
+ if len(UiInf) > 0:
+ #
+ # Check UI
+ #
+ for UiItem in UiInf:
+ IsValidFileFlag = False
+ InfBianryUiItemObj = None
+ UiContent = UiItem[0]
+ UiComment = UiItem[1]
+ UiCurrentLine = UiItem[2]
+ GlobalData.gINF_CURRENT_LINE = deepcopy(UiItem[2])
+ #
+                    # Should contain no fewer than 2 elements
+ #
+ if len(UiContent) < 2:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (UiContent[0], 2),
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ return False
+
+ if len(UiContent) > 4:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX % (UiContent[0], 4),
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ return False
+ if len(UiContent) >= 2:
+ #
+                        # Create a UI object.
+ #
+ InfBianryUiItemObj = InfBianryUiItem()
+ if UiContent[0] != 'UI':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_VER_TYPE % ('UI'),
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ InfBianryUiItemObj.SetUiTypeName(UiContent[0])
+ InfBianryUiItemObj.SetType(UiContent[0])
+ #
+                        # Verify whether the file exists
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR,
+ UiContent[1])))
+ if not (ValidFile(FullFileName) or ValidFile(UiContent[1])):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST % (UiContent[1]),
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ #
+                        # Validate file existence and path format.
+ #
+ if IsValidPath(UiContent[1], GlobalData.gINF_MODULE_DIR):
+ IsValidFileFlag = True
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (UiContent[1]),
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ return False
+ if IsValidFileFlag:
+                            UiContent[1] = ConvPathFromAbsToRel(UiContent[1], GlobalData.gINF_MODULE_DIR)
+ InfBianryUiItemObj.SetFileName(UiContent[1])
+ if len(UiContent) >= 3:
+ #
+ # Add Target information
+ #
+ InfBianryUiItemObj.SetTarget(UiContent[2])
+ if len(UiContent) == 4:
+ if UiContent[3].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ #
+ # Validate Feature Flag Express
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(UiContent[3].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
+ File=UiCurrentLine.GetFileName(),
+ Line=UiCurrentLine.GetLineNo(),
+ ExtraData=UiCurrentLine.GetLineString())
+ InfBianryUiItemObj.SetFeatureFlagExp(UiContent[3])
+
+ InfBianryUiItemObj.SetSupArchList(__SupArchList)
+
+ #
+                            # Check for duplicate binary file names, following the rule below:
+ #
+ # A binary filename must not be duplicated within
+ # a [Binaries] section. A binary filename may appear in
+ # multiple architectural [Binaries] sections. A binary
+ # filename listed in an architectural [Binaries] section
+ # must not be listed in the common architectural
+ # [Binaries] section.
+ #
+                            # NOTE: This check does not report an error for now.
+ #
+# for Item in self.Binaries:
+# if Item.GetFileName() == InfBianryUiItemObj.GetFileName():
+# ItemSupArchList = Item.GetSupArchList()
+# for ItemArch in ItemSupArchList:
+# for UiItemObjArch in __SupArchList:
+# if ItemArch == UiItemObjArch:
+# #
+# # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+# #
+# pass
+# if ItemArch.upper() == 'COMMON' or UiItemObjArch.upper() == 'COMMON':
+# #
+# # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+# #
+# pass
+
+ if InfBianryUiItemObj is not None:
+                                if InfBianryUiItemObj in self.Binaries:
+ BinariesList = self.Binaries[InfBianryUiItemObj]
+ BinariesList.append((InfBianryUiItemObj, UiComment))
+ self.Binaries[InfBianryUiItemObj] = BinariesList
+ else:
+ BinariesList = []
+ BinariesList.append((InfBianryUiItemObj, UiComment))
+ self.Binaries[InfBianryUiItemObj] = BinariesList
+ if Ver is not None and len(Ver) > 0:
+ self.CheckVer(Ver, __SupArchList)
+ if CommonBinary and len(CommonBinary) > 0:
+ self.ParseCommonBinary(CommonBinary, __SupArchList)
+
+ return True
+
+ def GetBinary(self):
+ return self.Binaries
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py
new file mode 100755
index 00000000..1638e533
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py
@@ -0,0 +1,87 @@
+## @file
+# This file is used to define class objects of INF file [BuildOptions] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfBuildOptionObject
+'''
+
+from Library import GlobalData
+
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+
+class InfBuildOptionItem():
+ def __init__(self):
+ self.Content = ''
+ self.SupArchList = []
+ self.AsBuildList = []
+
+ def SetContent(self, Content):
+ self.Content = Content
+ def GetContent(self):
+ return self.Content
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ #
+ # AsBuild Information
+ #
+ def SetAsBuildList(self, AsBuildList):
+ self.AsBuildList = AsBuildList
+ def GetAsBuildList(self):
+ return self.AsBuildList
+
+
+## INF BuildOption section
+# Macro definitions are not permitted in this section.
+#
+#
+class InfBuildOptionsObject(InfSectionCommonDef):
+ def __init__(self):
+ self.BuildOptions = []
+ InfSectionCommonDef.__init__(self)
+ ## SetBuildOptions function
+ #
+    # For BuildOptionName, its format needs to be validated;
+    # BuildOptionValue is simply ignored.
+    #
+    # @param ArchList     Indicates which arch the build options belong to.
+    # @param BuildOptCont A list containing BuildOption related information.
+    #                     Each element in the list contains 3 members:
+    #                     BuildOptionName, BuildOptionValue and an IsReplace
+    #                     flag.
+    #
+    # @return True  Build options were set/validated successfully
+    # @return False Build options failed to be set/validated
+ #
+ def SetBuildOptions(self, BuildOptCont, ArchList = None, SectionContent = ''):
+
+ if not GlobalData.gIS_BINARY_INF:
+
+ if SectionContent.strip() != '':
+ InfBuildOptionItemObj = InfBuildOptionItem()
+ InfBuildOptionItemObj.SetContent(SectionContent)
+ InfBuildOptionItemObj.SetSupArchList(ArchList)
+
+ self.BuildOptions.append(InfBuildOptionItemObj)
+ else:
+ #
+ # For AsBuild INF file
+ #
+ if len(BuildOptCont) >= 1:
+ InfBuildOptionItemObj = InfBuildOptionItem()
+ InfBuildOptionItemObj.SetAsBuildList(BuildOptCont)
+ InfBuildOptionItemObj.SetSupArchList(ArchList)
+ self.BuildOptions.append(InfBuildOptionItemObj)
+
+ return True
+
+ def GetBuildOptions(self):
+ return self.BuildOptions
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py
new file mode 100755
index 00000000..4e6c6f7a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py
@@ -0,0 +1,156 @@
+## @file
+# This file is used to define common class objects for INF file.
+# It will be consumed by InfParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfCommonObject
+'''
+
+## InfLineCommentObject
+#
+# Comment Object for any line in the INF file
+#
+# #
+# # HeaderComment
+# #
+# Line # TailComment
+#
+class InfLineCommentObject():
+ def __init__(self):
+ self.HeaderComments = ''
+ self.TailComments = ''
+
+ def SetHeaderComments(self, HeaderComments):
+ self.HeaderComments = HeaderComments
+
+ def GetHeaderComments(self):
+ return self.HeaderComments
+
+ def SetTailComments(self, TailComments):
+ self.TailComments = TailComments
+
+ def GetTailComments(self):
+ return self.TailComments
+
+## CurrentLine
+#
+class CurrentLine():
+ def __init__(self):
+ self.LineNo = ''
+ self.LineString = ''
+ self.FileName = ''
+
+ ## SetLineNo
+ #
+ # @param LineNo: LineNo
+ #
+ def SetLineNo(self, LineNo):
+ self.LineNo = LineNo
+
+ ## GetLineNo
+ #
+ def GetLineNo(self):
+ return self.LineNo
+
+ ## SetLineString
+ #
+ # @param LineString: Line String content
+ #
+ def SetLineString(self, LineString):
+ self.LineString = LineString
+
+ ## GetLineString
+ #
+ def GetLineString(self):
+ return self.LineString
+
+ ## SetFileName
+ #
+ # @param FileName: File Name
+ #
+ def SetFileName(self, FileName):
+ self.FileName = FileName
+
+ ## GetFileName
+ #
+ def GetFileName(self):
+ return self.FileName
+
+##
+# Inf Section common data
+#
+class InfSectionCommonDef():
+ def __init__(self):
+ #
+ # #
+ # # HeaderComments at here
+ # #
+ # [xxSection] TailComments at here
+ # data
+ #
+ self.HeaderComments = ''
+ self.TailComments = ''
+ #
+ # The support arch list of this section
+ #
+ self.SupArchList = []
+
+ #
+ # Store all section content
+ # Key is supported Arch
+ #
+ self.AllContent = {}
+
+ ## SetHeaderComments
+ #
+ # @param HeaderComments: HeaderComments
+ #
+ def SetHeaderComments(self, HeaderComments):
+ self.HeaderComments = HeaderComments
+
+ ## GetHeaderComments
+ #
+ def GetHeaderComments(self):
+ return self.HeaderComments
+
+ ## SetTailComments
+ #
+ # @param TailComments: TailComments
+ #
+ def SetTailComments(self, TailComments):
+ self.TailComments = TailComments
+
+ ## GetTailComments
+ #
+ def GetTailComments(self):
+ return self.TailComments
+
+ ## SetSupArchList
+ #
+ # @param Arch: Arch
+ #
+ def SetSupArchList(self, Arch):
+ if Arch not in self.SupArchList:
+ self.SupArchList.append(Arch)
+
+ ## GetSupArchList
+ #
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ ## SetAllContent
+ #
+    # @param Content: Content
+ #
+ def SetAllContent(self, Content):
+ self.AllContent = Content
+
+ ## GetAllContent
+ #
+ def GetAllContent(self):
+ return self.AllContent
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py
new file mode 100755
index 00000000..05844834
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py
@@ -0,0 +1,83 @@
+## @file
+# This file is used to define common class objects of [Defines] section for INF file.
+# It will be consumed by InfParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfDefineCommonObject
+'''
+
+from Object.Parser.InfCommonObject import InfLineCommentObject
+
+## InfDefineImageExeParamItem
+#
+class InfDefineImageExeParamItem():
+ def __init__(self):
+ self.CName = ''
+ self.FeatureFlagExp = ''
+ self.Comments = InfLineCommentObject()
+
+ def SetCName(self, CName):
+ self.CName = CName
+ def GetCName(self):
+ return self.CName
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+## InfDefineEntryPointItem
+#
+class InfDefineEntryPointItem(InfDefineImageExeParamItem):
+ def __init__(self):
+ InfDefineImageExeParamItem.__init__(self)
+
+## InfDefineUnloadImageItem
+#
+class InfDefineUnloadImageItem(InfDefineImageExeParamItem):
+ def __init__(self):
+ InfDefineImageExeParamItem.__init__(self)
+
+## InfDefineConstructorItem
+#
+class InfDefineConstructorItem(InfDefineImageExeParamItem):
+ def __init__(self):
+ InfDefineImageExeParamItem.__init__(self)
+ self.SupModList = []
+
+ def SetSupModList(self, SupModList):
+ self.SupModList = SupModList
+ def GetSupModList(self):
+ return self.SupModList
+
+## InfDefineDestructorItem
+#
+class InfDefineDestructorItem(InfDefineImageExeParamItem):
+ def __init__(self):
+ InfDefineImageExeParamItem.__init__(self)
+ self.SupModList = []
+
+ def SetSupModList(self, SupModList):
+ self.SupModList = SupModList
+ def GetSupModList(self):
+ return self.SupModList
+
+## InfDefineLibraryItem
+#
+class InfDefineLibraryItem():
+ def __init__(self):
+ self.LibraryName = ''
+ self.Types = []
+ self.Comments = InfLineCommentObject()
+
+ def SetLibraryName(self, Name):
+ self.LibraryName = Name
+ def GetLibraryName(self):
+ return self.LibraryName
+ def SetTypes(self, Type):
+ self.Types = Type
+ def GetTypes(self):
+ return self.Types
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
new file mode 100755
index 00000000..bd3e06e4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
@@ -0,0 +1,1002 @@
+## @file
+# This file is used to define class objects of [Defines] section for INF file.
+# It will be consumed by InfParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfDefineObject
+'''
+
+import os
+import re
+
+from Logger import StringTable as ST
+from Logger import ToolError
+from Library import GlobalData
+from Library import DataType as DT
+from Library.StringUtils import GetSplitValueList
+from Library.Misc import CheckGuidRegFormat
+from Library.Misc import Sdict
+from Library.Misc import ConvPathFromAbsToRel
+from Library.Misc import ValidateUNIFilePath
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.ParserValidate import IsValidWord
+from Library.ParserValidate import IsValidInfMoudleType
+from Library.ParserValidate import IsValidHex
+from Library.ParserValidate import IsValidHexVersion
+from Library.ParserValidate import IsValidDecVersion
+from Library.ParserValidate import IsValidCVariableName
+from Library.ParserValidate import IsValidBoolType
+from Library.ParserValidate import IsValidPath
+from Library.ParserValidate import IsValidFamily
+from Library.ParserValidate import IsValidIdentifier
+from Library.ParserValidate import IsValidDecVersionVal
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Object.Parser.InfCommonObject import CurrentLine
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+from Object.Parser.InfMisc import ErrorInInf
+from Object.Parser.InfDefineCommonObject import InfDefineLibraryItem
+from Object.Parser.InfDefineCommonObject import InfDefineEntryPointItem
+from Object.Parser.InfDefineCommonObject import InfDefineUnloadImageItem
+from Object.Parser.InfDefineCommonObject import InfDefineConstructorItem
+from Object.Parser.InfDefineCommonObject import InfDefineDestructorItem
+
+class InfDefSectionOptionRomInfo():
+ def __init__(self):
+ self.PciVendorId = None
+ self.PciDeviceId = None
+ self.PciClassCode = None
+ self.PciRevision = None
+ self.PciCompress = None
+ self.CurrentLine = ['', -1, '']
+ def SetPciVendorId(self, PciVendorId, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PciVendorId is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_VENDOR_ID),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The PciVendorId should be a valid hex string.
+ #
+ if (IsValidHex(PciVendorId)):
+ self.PciVendorId = InfDefMember()
+ self.PciVendorId.SetValue(PciVendorId)
+ self.PciVendorId.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciVendorId),
+ LineInfo=self.CurrentLine)
+ return False
+
+ def GetPciVendorId(self):
+ return self.PciVendorId
+
+ def SetPciDeviceId(self, PciDeviceId, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PciDeviceId is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_DEVICE_ID),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The PciDeviceId should be a valid hex string.
+ #
+ if (IsValidHex(PciDeviceId)):
+ self.PciDeviceId = InfDefMember()
+ self.PciDeviceId.SetValue(PciDeviceId)
+ self.PciDeviceId.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciDeviceId),
+ LineInfo=self.CurrentLine)
+ return False
+
+ def GetPciDeviceId(self):
+ return self.PciDeviceId
+
+ def SetPciClassCode(self, PciClassCode, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PciClassCode is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_CLASS_CODE),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The PciClassCode should be a valid hex string.
+ #
+ if (IsValidHex(PciClassCode)):
+ self.PciClassCode = InfDefMember()
+ self.PciClassCode.SetValue(PciClassCode)
+ self.PciClassCode.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
+ (PciClassCode),
+ LineInfo=self.CurrentLine)
+ return False
+
+ def GetPciClassCode(self):
+ return self.PciClassCode
+
+ def SetPciRevision(self, PciRevision, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PciRevision is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_REVISION),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The PciRevision should be a valid hex string.
+ #
+ if (IsValidHex(PciRevision)):
+ self.PciRevision = InfDefMember()
+ self.PciRevision.SetValue(PciRevision)
+ self.PciRevision.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciRevision),
+ LineInfo=self.CurrentLine)
+ return False
+
+ def GetPciRevision(self):
+ return self.PciRevision
+
+ def SetPciCompress(self, PciCompress, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PciCompress is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_COMPRESS),
+ LineInfo=self.CurrentLine)
+ return False
+
+ #
+ # The PciCompress should be 'TRUE' or 'FALSE'.
+ #
+ if (PciCompress == 'TRUE' or PciCompress == 'FALSE'):
+ self.PciCompress = InfDefMember()
+ self.PciCompress.SetValue(PciCompress)
+ self.PciCompress.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciCompress),
+ LineInfo=self.CurrentLine)
+ return False
+ def GetPciCompress(self):
+ return self.PciCompress
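+
+# Usage sketch (illustrative, not part of the parser flow): each PCI option
+# ROM define accepts one hex-validated assignment and reports a duplicate on
+# a second one, e.g.:
+#
+#   RomInfo = InfDefSectionOptionRomInfo()
+#   RomInfo.SetPciVendorId('0x8086', InfLineCommentObject())  # stored, returns True
+#   RomInfo.SetPciVendorId('0x8086', InfLineCommentObject())  # duplicate, reported via ErrorInInf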
+##
+# INF [Define] section Object
+#
+class InfDefSection(InfDefSectionOptionRomInfo):
+ def __init__(self):
+ self.BaseName = None
+ self.FileGuid = None
+ self.ModuleType = None
+ self.ModuleUniFileName = None
+ self.InfVersion = None
+ self.EdkReleaseVersion = None
+ self.UefiSpecificationVersion = None
+ self.PiSpecificationVersion = None
+ self.LibraryClass = []
+ self.Package = None
+ self.VersionString = None
+ self.PcdIsDriver = None
+ self.EntryPoint = []
+ self.UnloadImages = []
+ self.Constructor = []
+ self.Destructor = []
+ self.Shadow = None
+ self.CustomMakefile = []
+ self.Specification = []
+ self.UefiHiiResourceSection = None
+ self.DpxSource = []
+ self.CurrentLine = ['', -1, '']
+ InfDefSectionOptionRomInfo.__init__(self)
+
+    ## SetBaseName
+ #
+ # @param BaseName: BaseName
+ #
+ def SetBaseName(self, BaseName, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.BaseName is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_BASE_NAME),
+ LineInfo=self.CurrentLine)
+ return False
+ if not (BaseName == '' or BaseName is None):
+ if IsValidWord(BaseName) and not BaseName.startswith("_"):
+ self.BaseName = InfDefMember()
+ self.BaseName.SetValue(BaseName)
+ self.BaseName.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_NAME_INVALID%(BaseName),
+ LineInfo=self.CurrentLine)
+ return False
+
+ ## GetBaseName
+ #
+ def GetBaseName(self):
+ return self.BaseName
+
+ ## SetFileGuid
+ #
+ # @param FileGuid: FileGuid
+ #
+ def SetFileGuid(self, FileGuid, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.FileGuid is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_FILE_GUID),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+ # Do verification of GUID content/format
+ #
+ if (CheckGuidRegFormat(FileGuid)):
+ self.FileGuid = InfDefMember()
+ self.FileGuid.SetValue(FileGuid)
+ self.FileGuid.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_GUID_INVALID%(FileGuid),
+ LineInfo=self.CurrentLine)
+ return False
+
+ ## GetFileGuid
+ #
+ def GetFileGuid(self):
+ return self.FileGuid
+
+ ## SetModuleType
+ #
+ # @param ModuleType: ModuleType
+ #
+ def SetModuleType(self, ModuleType, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.ModuleType is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_MODULE_TYPE),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+ # Valid Module Type or not
+ #
+ if (IsValidInfMoudleType(ModuleType)):
+ self.ModuleType = InfDefMember()
+ self.ModuleType.SetValue(ModuleType)
+ self.ModuleType.CurrentLine = CurrentLine()
+ self.ModuleType.CurrentLine.SetLineNo(self.CurrentLine[1])
+ self.ModuleType.CurrentLine.SetLineString(self.CurrentLine[2])
+ self.ModuleType.CurrentLine.SetFileName(self.CurrentLine[0])
+ self.ModuleType.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%\
+ (ModuleType),
+ LineInfo=self.CurrentLine)
+ return False
+
+ ## GetModuleType
+ #
+ def GetModuleType(self):
+ return self.ModuleType
+
+ ## SetModuleUniFileName
+ #
+ # @param ModuleUniFileName: ModuleUniFileName
+ #
+ def SetModuleUniFileName(self, ModuleUniFileName, Comments):
+ if Comments:
+ pass
+ if self.ModuleUniFileName is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_MODULE_UNI_FILE),
+ LineInfo=self.CurrentLine)
+ self.ModuleUniFileName = ModuleUniFileName
+
+    ## GetModuleUniFileName
+ #
+ def GetModuleUniFileName(self):
+ return self.ModuleUniFileName
+
+ ## SetInfVersion
+ #
+ # @param InfVersion: InfVersion
+ #
+ def SetInfVersion(self, InfVersion, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.InfVersion is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_INF_VERSION),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The InfVersion should be a 4-byte hex string or a dotted decimal version.
+ #
+ if (IsValidHex(InfVersion)):
+            if int(InfVersion, 16) < 0x00010005:
+ ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
+ ErrorCode=ToolError.EDK1_INF_ERROR,
+ LineInfo=self.CurrentLine)
+ elif IsValidDecVersionVal(InfVersion):
+            Major, _, Minor = InfVersion.partition('.')
+            if (int(Major) << 16) + int(Minor or '0') < 65541:
+ ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
+ ErrorCode=ToolError.EDK1_INF_ERROR,
+ LineInfo=self.CurrentLine)
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(InfVersion),
+ LineInfo=self.CurrentLine)
+ return False
+
+ self.InfVersion = InfDefMember()
+ self.InfVersion.SetValue(InfVersion)
+ self.InfVersion.Comments = Comments
+ return True
+
+ ## GetInfVersion
+ #
+ def GetInfVersion(self):
+ return self.InfVersion
+
+ ## SetEdkReleaseVersion
+ #
+ # @param EdkReleaseVersion: EdkReleaseVersion
+ #
+ def SetEdkReleaseVersion(self, EdkReleaseVersion, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.EdkReleaseVersion is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The EdkReleaseVersion should be a 4-byte hex version or a decimal version.
+ #
+ if IsValidHexVersion(EdkReleaseVersion) or \
+ IsValidDecVersionVal(EdkReleaseVersion):
+ self.EdkReleaseVersion = InfDefMember()
+ self.EdkReleaseVersion.SetValue(EdkReleaseVersion)
+ self.EdkReleaseVersion.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
+ %(EdkReleaseVersion),
+ LineInfo=self.CurrentLine)
+ return False
+
+ ## GetEdkReleaseVersion
+ #
+ def GetEdkReleaseVersion(self):
+ return self.EdkReleaseVersion
+
+ ## SetUefiSpecificationVersion
+ #
+ # @param UefiSpecificationVersion: UefiSpecificationVersion
+ #
+ def SetUefiSpecificationVersion(self, UefiSpecificationVersion, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.UefiSpecificationVersion is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The UefiSpecificationVersion should be a 4-byte hex version or a decimal version.
+ #
+ if IsValidHexVersion(UefiSpecificationVersion) or \
+ IsValidDecVersionVal(UefiSpecificationVersion):
+ self.UefiSpecificationVersion = InfDefMember()
+ self.UefiSpecificationVersion.SetValue(UefiSpecificationVersion)
+ self.UefiSpecificationVersion.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
+ %(UefiSpecificationVersion),
+ LineInfo=self.CurrentLine)
+ return False
+
+ ## GetUefiSpecificationVersion
+ #
+ def GetUefiSpecificationVersion(self):
+ return self.UefiSpecificationVersion
+
+ ## SetPiSpecificationVersion
+ #
+ # @param PiSpecificationVersion: PiSpecificationVersion
+ #
+ def SetPiSpecificationVersion(self, PiSpecificationVersion, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PiSpecificationVersion is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+        # The PiSpecificationVersion should be a 4-byte hex version or a decimal version.
+ #
+ if IsValidHexVersion(PiSpecificationVersion) or \
+ IsValidDecVersionVal(PiSpecificationVersion):
+ self.PiSpecificationVersion = InfDefMember()
+ self.PiSpecificationVersion.SetValue(PiSpecificationVersion)
+ self.PiSpecificationVersion.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
+ %(PiSpecificationVersion),
+ LineInfo=self.CurrentLine)
+ return False
+
+ ## GetPiSpecificationVersion
+ #
+ def GetPiSpecificationVersion(self):
+ return self.PiSpecificationVersion
+
+ ## SetLibraryClass
+ #
+ # @param LibraryClass: LibraryClass
+ #
+ def SetLibraryClass(self, LibraryClass, Comments):
+ ValueList = GetSplitValueList(LibraryClass)
+ Name = ValueList[0]
+ if IsValidWord(Name):
+ InfDefineLibraryItemObj = InfDefineLibraryItem()
+ InfDefineLibraryItemObj.SetLibraryName(Name)
+ InfDefineLibraryItemObj.Comments = Comments
+ if len(ValueList) == 2:
+ Type = ValueList[1]
+ TypeList = GetSplitValueList(Type, ' ')
+ TypeList = [Type for Type in TypeList if Type != '']
+ for Item in TypeList:
+ if Item not in DT.MODULE_LIST:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Item),
+ LineInfo=self.CurrentLine)
+ return False
+ InfDefineLibraryItemObj.SetTypes(TypeList)
+ self.LibraryClass.append(InfDefineLibraryItemObj)
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
+ LineInfo=self.CurrentLine)
+ return False
+
+ return True
+
+ def GetLibraryClass(self):
+ return self.LibraryClass
+
+ def SetVersionString(self, VersionString, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.VersionString is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_VERSION_STRING),
+ LineInfo=self.CurrentLine)
+ return False
+ if not IsValidDecVersion(VersionString):
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
+ %(VersionString),
+ LineInfo=self.CurrentLine)
+ self.VersionString = InfDefMember()
+ self.VersionString.SetValue(VersionString)
+ self.VersionString.Comments = Comments
+ return True
+
+
+ def GetVersionString(self):
+ return self.VersionString
+
+ def SetPcdIsDriver(self, PcdIsDriver, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.PcdIsDriver is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
+ %(DT.TAB_INF_DEFINES_PCD_IS_DRIVER),
+ LineInfo=self.CurrentLine)
+ return False
+ if PcdIsDriver == 'PEI_PCD_DRIVER' or PcdIsDriver == 'DXE_PCD_DRIVER':
+ self.PcdIsDriver = InfDefMember()
+ self.PcdIsDriver.SetValue(PcdIsDriver)
+ self.PcdIsDriver.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PcdIsDriver),
+ LineInfo=self.CurrentLine)
+ return False
+
+ def GetPcdIsDriver(self):
+ return self.PcdIsDriver
+
+ #
+ # SetEntryPoint
+ #
+ def SetEntryPoint(self, EntryPoint, Comments):
+ #
+ # It can be a list
+ #
+ ValueList = []
+ TokenList = GetSplitValueList(EntryPoint, DT.TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
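+        # (The slice assignment copies TokenList into ValueList, which then
+        # holds <CName> optionally followed by <FeatureFlagExp>.)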
+ InfDefineEntryPointItemObj = InfDefineEntryPointItem()
+ if not IsValidCVariableName(ValueList[0]):
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
+ (ValueList[0]),
+ LineInfo=self.CurrentLine)
+ InfDefineEntryPointItemObj.SetCName(ValueList[0])
+ if len(ValueList) == 2:
+ if ValueList[1].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
+ (ValueList[1]),
+ LineInfo=self.CurrentLine)
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[1].strip())
+ if not FeatureFlagRtv[0]:
+ ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%\
+ (FeatureFlagRtv[1]),
+ LineInfo=self.CurrentLine)
+ InfDefineEntryPointItemObj.SetFeatureFlagExp(ValueList[1])
+ if len(ValueList) > 2:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(EntryPoint),
+ LineInfo=self.CurrentLine)
+ InfDefineEntryPointItemObj.Comments = Comments
+ self.EntryPoint.append(InfDefineEntryPointItemObj)
+
+ def GetEntryPoint(self):
+ return self.EntryPoint
+
+ #
+ # SetUnloadImages
+ #
+ def SetUnloadImages(self, UnloadImages, Comments):
+ #
+ # It can be a list
+ #
+ ValueList = []
+ TokenList = GetSplitValueList(UnloadImages, DT.TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ InfDefineUnloadImageItemObj = InfDefineUnloadImageItem()
+ if not IsValidCVariableName(ValueList[0]):
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
+ LineInfo=self.CurrentLine)
+ InfDefineUnloadImageItemObj.SetCName(ValueList[0])
+ if len(ValueList) == 2:
+ if ValueList[1].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
+ LineInfo=self.CurrentLine)
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[1].strip())
+ if not FeatureFlagRtv[0]:
+ ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ LineInfo=self.CurrentLine)
+ InfDefineUnloadImageItemObj.SetFeatureFlagExp(ValueList[1])
+
+ if len(ValueList) > 2:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(UnloadImages),
+ LineInfo=self.CurrentLine)
+ InfDefineUnloadImageItemObj.Comments = Comments
+ self.UnloadImages.append(InfDefineUnloadImageItemObj)
+
+ def GetUnloadImages(self):
+ return self.UnloadImages
+
+ #
+ # SetConstructor
+ #
+ def SetConstructor(self, Constructor, Comments):
+ #
+ # It can be a list
+ #
+ ValueList = []
+ TokenList = GetSplitValueList(Constructor, DT.TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ InfDefineConstructorItemObj = InfDefineConstructorItem()
+ if not IsValidCVariableName(ValueList[0]):
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
+ LineInfo=self.CurrentLine)
+ InfDefineConstructorItemObj.SetCName(ValueList[0])
+ if len(ValueList) >= 2:
+ ModList = GetSplitValueList(ValueList[1], ' ')
+ if ValueList[1].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
+ LineInfo=self.CurrentLine)
+ for ModItem in ModList:
+ if ModItem not in DT.MODULE_LIST:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%(ModItem),
+ LineInfo=self.CurrentLine)
+ InfDefineConstructorItemObj.SetSupModList(ModList)
+ if len(ValueList) == 3:
+ if ValueList[2].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[2]),
+ LineInfo=self.CurrentLine)
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[2].strip())
+ if not FeatureFlagRtv[0]:
+                ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ LineInfo=self.CurrentLine)
+ InfDefineConstructorItemObj.SetFeatureFlagExp(ValueList[2])
+
+ if len(ValueList) > 3:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Constructor),
+ LineInfo=self.CurrentLine)
+ InfDefineConstructorItemObj.Comments = Comments
+ self.Constructor.append(InfDefineConstructorItemObj)
+
+ def GetConstructor(self):
+ return self.Constructor
+
+ #
+ # SetDestructor
+ #
+ def SetDestructor(self, Destructor, Comments):
+ #
+        # It can be a list
+ #
+ ValueList = []
+ TokenList = GetSplitValueList(Destructor, DT.TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ InfDefineDestructorItemObj = InfDefineDestructorItem()
+ if not IsValidCVariableName(ValueList[0]):
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
+ LineInfo=self.CurrentLine)
+ InfDefineDestructorItemObj.SetCName(ValueList[0])
+ if len(ValueList) >= 2:
+ ModList = GetSplitValueList(ValueList[1].strip(), ' ')
+ if ValueList[1].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
+ LineInfo=self.CurrentLine)
+ for ModItem in ModList:
+ if ModItem not in DT.MODULE_LIST:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%(ModItem),
+ LineInfo=self.CurrentLine)
+ InfDefineDestructorItemObj.SetSupModList(ModList)
+ if len(ValueList) == 3:
+ if ValueList[2].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[2]),
+ LineInfo=self.CurrentLine)
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[2].strip())
+ if not FeatureFlagRtv[0]:
+ ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ LineInfo=self.CurrentLine)
+ InfDefineDestructorItemObj.SetFeatureFlagExp(ValueList[2])
+
+ if len(ValueList) > 3:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Destructor),
+ LineInfo=self.CurrentLine)
+
+ InfDefineDestructorItemObj.Comments = Comments
+ self.Destructor.append(InfDefineDestructorItemObj)
+
+ def GetDestructor(self):
+ return self.Destructor
+
+ def SetShadow(self, Shadow, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.Shadow is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_SHADOW),
+ LineInfo=self.CurrentLine)
+ return False
+ if (IsValidBoolType(Shadow)):
+ self.Shadow = InfDefMember()
+ self.Shadow.SetValue(Shadow)
+ self.Shadow.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Shadow),
+ LineInfo=self.CurrentLine)
+ return False
+ def GetShadow(self):
+ return self.Shadow
+
+ #
+ # <Family> ::= {"MSFT"} {"GCC"}
+ # <CustomMake> ::= [<Family> "|"] <Filename>
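+    #
+    # e.g. (illustrative):  CUSTOM_MAKEFILE = MSFT|MyMakefile.mak
+    # yields Family 'MSFT' and FileName 'MyMakefile.mak'.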
+ #
+ def SetCustomMakefile(self, CustomMakefile, Comments):
+ if not (CustomMakefile == '' or CustomMakefile is None):
+ ValueList = GetSplitValueList(CustomMakefile)
+ if len(ValueList) == 1:
+ FileName = ValueList[0]
+ Family = ''
+ else:
+ Family = ValueList[0]
+ FileName = ValueList[1]
+ Family = Family.strip()
+ if Family != '':
+ if not IsValidFamily(Family):
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Family),
+ LineInfo=self.CurrentLine)
+ return False
+ #
+ # The MakefileName specified file should exist
+ #
+ IsValidFileFlag = False
+ ModulePath = os.path.split(self.CurrentLine[0])[0]
+ if IsValidPath(FileName, ModulePath):
+ IsValidFileFlag = True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(FileName),
+ LineInfo=self.CurrentLine)
+ return False
+ if IsValidFileFlag:
+ FileName = ConvPathFromAbsToRel(FileName, GlobalData.gINF_MODULE_DIR)
+ self.CustomMakefile.append((Family, FileName, Comments))
+ IsValidFileFlag = False
+ return True
+ else:
+ return False
+
+ def GetCustomMakefile(self):
+ return self.CustomMakefile
+
+ #
+ # ["SPEC" <Spec> <EOL>]*{0,}
+ # <Spec> ::= <Word> "=" <VersionVal>
+ # <VersionVal> ::= {<HexVersion>] {<DecVersion>}
+ # <HexNumber> ::= "0x" [<HexDigit>]{1,}
+ # <DecVersion> ::= (0-9){1,} ["." (0-9){1,2}]
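+    #
+    # e.g. (illustrative):  SPEC PI_SPEC_VERSION = 0x00010000
+    # is stored as the pair ('PI_SPEC_VERSION', '0x00010000').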
+ #
+ def SetSpecification(self, Specification, Comments):
+ #
+ # Valid the value of Specification
+ #
+ __ValueList = []
+ TokenList = GetSplitValueList(Specification, DT.TAB_EQUAL_SPLIT, 1)
+ __ValueList[0:len(TokenList)] = TokenList
+ if len(__ValueList) != 2:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_NAME + ' Or ' + ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
+ LineInfo=self.CurrentLine)
+ Name = __ValueList[0].strip()
+ Version = __ValueList[1].strip()
+ if IsValidIdentifier(Name):
+ if IsValidDecVersion(Version):
+ self.Specification.append((Name, Version, Comments))
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Version),
+ LineInfo=self.CurrentLine)
+ return False
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
+ LineInfo=self.CurrentLine)
+ return False
+ return True
+
+ def GetSpecification(self):
+ return self.Specification
+
+ #
+ # [<UefiHiiResource> <EOL>]{0,1}
+ # <UefiHiiResource> ::= "UEFI_HII_RESOURCE_SECTION" "=" <BoolType>
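+    # e.g. (illustrative):  UEFI_HII_RESOURCE_SECTION = TRUE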
+ #
+ def SetUefiHiiResourceSection(self, UefiHiiResourceSection, Comments):
+ #
+ # Value has been set before.
+ #
+ if self.UefiHiiResourceSection is not None:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND
+ %(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION),
+ LineInfo=self.CurrentLine)
+ return False
+ if not (UefiHiiResourceSection == '' or UefiHiiResourceSection is None):
+ if (IsValidBoolType(UefiHiiResourceSection)):
+ self.UefiHiiResourceSection = InfDefMember()
+ self.UefiHiiResourceSection.SetValue(UefiHiiResourceSection)
+ self.UefiHiiResourceSection.Comments = Comments
+ return True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(UefiHiiResourceSection),
+ LineInfo=self.CurrentLine)
+ return False
+ else:
+ return False
+
+ def GetUefiHiiResourceSection(self):
+ return self.UefiHiiResourceSection
+
+ def SetDpxSource(self, DpxSource, Comments):
+ #
+ # The MakefileName specified file should exist
+ #
+ IsValidFileFlag = False
+ ModulePath = os.path.split(self.CurrentLine[0])[0]
+ if IsValidPath(DpxSource, ModulePath):
+ IsValidFileFlag = True
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(DpxSource),
+ LineInfo=self.CurrentLine)
+ return False
+ if IsValidFileFlag:
+ DpxSource = ConvPathFromAbsToRel(DpxSource,
+ GlobalData.gINF_MODULE_DIR)
+ self.DpxSource.append((DpxSource, Comments))
+ IsValidFileFlag = False
+ return True
+
+ def GetDpxSource(self):
+ return self.DpxSource
+
+gFUNCTION_MAPPING_FOR_DEFINE_SECTION = {
+ #
+ # Required Fields
+ #
+ DT.TAB_INF_DEFINES_BASE_NAME : InfDefSection.SetBaseName,
+ DT.TAB_INF_DEFINES_FILE_GUID : InfDefSection.SetFileGuid,
+ DT.TAB_INF_DEFINES_MODULE_TYPE : InfDefSection.SetModuleType,
+ #
+ # Required by EDKII style INF file
+ #
+ DT.TAB_INF_DEFINES_INF_VERSION : InfDefSection.SetInfVersion,
+ #
+ # Optional Fields
+ #
+ DT.TAB_INF_DEFINES_MODULE_UNI_FILE : InfDefSection.SetModuleUniFileName,
+ DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION : InfDefSection.SetEdkReleaseVersion,
+ DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION : InfDefSection.SetUefiSpecificationVersion,
+ DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION : InfDefSection.SetPiSpecificationVersion,
+ DT.TAB_INF_DEFINES_LIBRARY_CLASS : InfDefSection.SetLibraryClass,
+ DT.TAB_INF_DEFINES_VERSION_STRING : InfDefSection.SetVersionString,
+ DT.TAB_INF_DEFINES_PCD_IS_DRIVER : InfDefSection.SetPcdIsDriver,
+ DT.TAB_INF_DEFINES_ENTRY_POINT : InfDefSection.SetEntryPoint,
+ DT.TAB_INF_DEFINES_UNLOAD_IMAGE : InfDefSection.SetUnloadImages,
+ DT.TAB_INF_DEFINES_CONSTRUCTOR : InfDefSection.SetConstructor,
+ DT.TAB_INF_DEFINES_DESTRUCTOR : InfDefSection.SetDestructor,
+ DT.TAB_INF_DEFINES_SHADOW : InfDefSection.SetShadow,
+ DT.TAB_INF_DEFINES_PCI_VENDOR_ID : InfDefSection.SetPciVendorId,
+ DT.TAB_INF_DEFINES_PCI_DEVICE_ID : InfDefSection.SetPciDeviceId,
+ DT.TAB_INF_DEFINES_PCI_CLASS_CODE : InfDefSection.SetPciClassCode,
+ DT.TAB_INF_DEFINES_PCI_REVISION : InfDefSection.SetPciRevision,
+ DT.TAB_INF_DEFINES_PCI_COMPRESS : InfDefSection.SetPciCompress,
+ DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE : InfDefSection.SetCustomMakefile,
+ DT.TAB_INF_DEFINES_SPEC : InfDefSection.SetSpecification,
+ DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION : InfDefSection.SetUefiHiiResourceSection,
+ DT.TAB_INF_DEFINES_DPX_SOURCE : InfDefSection.SetDpxSource
+}
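+
+# Dispatch sketch (illustrative, hypothetical values): InfDefObject.SetDefines()
+# below resolves each [Defines] keyword through this table and calls the
+# unbound setter against the per-arch InfDefSection instance, roughly:
+#
+#   ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[DT.TAB_INF_DEFINES_BASE_NAME]
+#   ProcessFunc(DefineList, 'MyModule', InfLineCommentObj)  # i.e. DefineList.SetBaseName(...)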
+
+## InfDefMember
+#
+#
+class InfDefMember():
+ def __init__(self, Name='', Value=''):
+ self.Comments = InfLineCommentObject()
+ self.Name = Name
+ self.Value = Value
+ self.CurrentLine = CurrentLine()
+ def GetName(self):
+ return self.Name
+ def SetName(self, Name):
+ self.Name = Name
+ def GetValue(self):
+ return self.Value
+ def SetValue(self, Value):
+ self.Value = Value
+
+## InfDefObject
+#
+#
+class InfDefObject(InfSectionCommonDef):
+ def __init__(self):
+ self.Defines = Sdict()
+ InfSectionCommonDef.__init__(self)
+ def SetDefines(self, DefineContent, Arch = None):
+ #
+ # Validate Arch
+ #
+        HasFoundInfVersionFlag = False
+ LineInfo = ['', -1, '']
+ ArchListString = ' '.join(Arch)
+ #
+ # Parse Define items.
+ #
+ for InfDefMemberObj in DefineContent:
+ ProcessFunc = None
+ Name = InfDefMemberObj.GetName()
+ Value = InfDefMemberObj.GetValue()
+ if Name == DT.TAB_INF_DEFINES_MODULE_UNI_FILE:
+ ValidateUNIFilePath(Value)
+ Value = os.path.join(os.path.dirname(InfDefMemberObj.CurrentLine.FileName), Value)
+ if not os.path.isfile(Value) or not os.path.exists(Value):
+ LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
+ LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
+ LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
+ ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Name),
+ LineInfo=LineInfo)
+ InfLineCommentObj = InfLineCommentObject()
+ InfLineCommentObj.SetHeaderComments(InfDefMemberObj.Comments.GetHeaderComments())
+ InfLineCommentObj.SetTailComments(InfDefMemberObj.Comments.GetTailComments())
+ if Name == 'COMPONENT_TYPE':
+ ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
+ ErrorCode=ToolError.EDK1_INF_ERROR,
+ RaiseError=True)
+ if Name == DT.TAB_INF_DEFINES_INF_VERSION:
+                HasFoundInfVersionFlag = True
+ if not (Name == '' or Name is None):
+ #
+ # Process "SPEC" Keyword definition.
+ #
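+                # e.g. (illustrative) a define line 'SPEC PI_SPEC = 1.0'
+                # arrives here with Name 'SPEC PI_SPEC' and Value '1.0'; it is
+                # renamed to Name 'SPEC' with Value 'PI_SPEC = 1.0'.
+                #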
+ ReName = re.compile(r"SPEC ", re.DOTALL)
+ if ReName.match(Name):
+ SpecValue = Name[Name.find("SPEC") + len("SPEC"):].strip()
+ Name = "SPEC"
+ Value = SpecValue + " = " + Value
+ if ArchListString in self.Defines:
+ DefineList = self.Defines[ArchListString]
+ LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
+ LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
+ LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
+ DefineList.CurrentLine = LineInfo
+ #
+ # Found the process function from mapping table.
+ #
+                    if Name not in gFUNCTION_MAPPING_FOR_DEFINE_SECTION:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID%(Name),
+ LineInfo=LineInfo)
+ else:
+ ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
+ if (ProcessFunc is not None):
+ ProcessFunc(DefineList, Value, InfLineCommentObj)
+ self.Defines[ArchListString] = DefineList
+ else:
+ DefineList = InfDefSection()
+ LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
+ LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
+ LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
+ DefineList.CurrentLine = LineInfo
+ #
+ # Found the process function from mapping table.
+ #
+                    if Name not in gFUNCTION_MAPPING_FOR_DEFINE_SECTION:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID%(Name),
+ LineInfo=LineInfo)
+ else:
+ ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
+ if (ProcessFunc is not None):
+ ProcessFunc(DefineList, Value, InfLineCommentObj)
+ self.Defines[ArchListString] = DefineList
+ #
+ # After set, check whether INF_VERSION defined.
+ #
+        if not HasFoundInfVersionFlag:
+ ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
+ ErrorCode=ToolError.EDK1_INF_ERROR,
+ RaiseError=True)
+ return True
+
+ def GetDefines(self):
+ return self.Defines
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py
new file mode 100755
index 00000000..b0b1db68
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py
@@ -0,0 +1,160 @@
+## @file
+# This file is used to define class objects of INF file [Depex] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfDepexObject
+'''
+
+from Library import DataType as DT
+from Library import GlobalData
+import Logger.Log as Logger
+from Logger import ToolError
+from Logger import StringTable as ST
+
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+from Library.ParserValidate import IsValidArch
+
+class InfDepexContentItem():
+ def __init__(self):
+ self.SectionType = ''
+ self.SectionString = ''
+
+ def SetSectionType(self, SectionType):
+ self.SectionType = SectionType
+ def GetSectionType(self):
+ return self.SectionType
+
+ def SetSectionString(self, SectionString):
+ self.SectionString = SectionString
+ def GetSectionString(self):
+ return self.SectionString
+
+
+class InfDepexItem():
+ def __init__(self):
+ self.DepexContent = ''
+ self.ModuleType = ''
+ self.SupArch = ''
+ self.HelpString = ''
+ self.FeatureFlagExp = ''
+ self.InfDepexContentItemList = []
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetSupArch(self, Arch):
+ self.SupArch = Arch
+ def GetSupArch(self):
+ return self.SupArch
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+ def SetModuleType(self, Type):
+ self.ModuleType = Type
+ def GetModuleType(self):
+ return self.ModuleType
+
+    def SetDepexContent(self, Content):
+ self.DepexContent = Content
+ def GetDepexContent(self):
+ return self.DepexContent
+
+ def SetInfDepexContentItemList(self, InfDepexContentItemList):
+ self.InfDepexContentItemList = InfDepexContentItemList
+ def GetInfDepexContentItemList(self):
+ return self.InfDepexContentItemList
+
+## InfDepexObject
+#
+#
+#
+class InfDepexObject(InfSectionCommonDef):
+ def __init__(self):
+ self.Depex = []
+ self.AllContent = ''
+ self.SectionContent = ''
+ InfSectionCommonDef.__init__(self)
+
+ def SetDepex(self, DepexContent, KeyList=None, CommentList=None):
+ for KeyItem in KeyList:
+ Arch = KeyItem[0]
+ ModuleType = KeyItem[1]
+ InfDepexItemIns = InfDepexItem()
+
+ #
+ # Validate Arch
+ #
+ if IsValidArch(Arch.strip().upper()):
+ InfDepexItemIns.SetSupArch(Arch)
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_NAME_INVALID % (Arch),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=KeyItem[2])
+
+ #
+ # Validate Module Type
+ #
+ if ModuleType and ModuleType != 'COMMON':
+ if ModuleType in DT.VALID_DEPEX_MODULE_TYPE_LIST:
+ InfDepexItemIns.SetModuleType(ModuleType)
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR % (ModuleType),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=KeyItem[2])
+
+ #
+ # Parser content in [Depex] section.
+ #
+ DepexString = ''
+ HelpString = ''
+ #
+ # Get Depex Expression
+ #
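+        # For instance (hypothetical section content), the two lines
+        #     gEfiCpuArchProtocolGuid AND    # trailing comment dropped
+        #     gEfiBdsArchProtocolGuid
+        # are stripped of trailing comments and joined with DT.END_OF_LINE
+        # into a single DepexString.
+        #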
+ for Line in DepexContent:
+ LineContent = Line[0].strip()
+ if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+ if LineContent:
+ DepexString = DepexString + LineContent + DT.END_OF_LINE
+ continue
+
+ if DepexString.endswith(DT.END_OF_LINE):
+ DepexString = DepexString[:-1]
+
+ if not DepexString.strip():
+ continue
+
+ #
+ # Get Help Text
+ #
+ for HelpLine in CommentList:
+ HelpString = HelpString + HelpLine + DT.END_OF_LINE
+ if HelpString.endswith(DT.END_OF_LINE):
+ HelpString = HelpString[:-1]
+
+        InfDepexItemIns.SetDepexContent(DepexString)
+ InfDepexItemIns.SetHelpString(HelpString)
+
+ self.Depex.append(InfDepexItemIns)
+
+ return True
+
+ def GetDepex(self):
+ return self.Depex
+
+ def GetAllContent(self):
+ return self.AllContent
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
new file mode 100755
index 00000000..f0eff8bd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
@@ -0,0 +1,347 @@
+## @file
+# This file is used to define class objects of INF file [Guids] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfGuidObject
+'''
+
+from Library.ParserValidate import IsValidCVariableName
+from Library.CommentParsing import ParseComment
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
+from Library.Misc import Sdict
+from Library import DataType as DT
+import Logger.Log as Logger
+from Logger import ToolError
+from Logger import StringTable as ST
+
+class InfGuidItemCommentContent():
+ def __init__(self):
+ #
+ # ## SOMETIMES_CONSUMES ## Variable:L"MemoryTypeInformation"
+ # TailString.
+ #
+ #
+ # SOMETIMES_CONSUMES
+ #
+ self.UsageItem = ''
+ #
+ # Variable
+ #
+ self.GuidTypeItem = ''
+ #
+ # MemoryTypeInformation
+ #
+ self.VariableNameItem = ''
+ #
+ # TailString
+ #
+ self.HelpStringItem = ''
+
+ def SetUsageItem(self, UsageItem):
+ self.UsageItem = UsageItem
+ def GetUsageItem(self):
+ return self.UsageItem
+
+ def SetGuidTypeItem(self, GuidTypeItem):
+ self.GuidTypeItem = GuidTypeItem
+ def GetGuidTypeItem(self):
+ return self.GuidTypeItem
+
+ def SetVariableNameItem(self, VariableNameItem):
+ self.VariableNameItem = VariableNameItem
+ def GetVariableNameItem(self):
+ return self.VariableNameItem
+
+ def SetHelpStringItem(self, HelpStringItem):
+ self.HelpStringItem = HelpStringItem
+ def GetHelpStringItem(self):
+ return self.HelpStringItem
+
+class InfGuidItem():
+ def __init__(self):
+ self.Name = ''
+ self.FeatureFlagExp = ''
+ #
+ # A list containing instances of InfGuidItemCommentContent
+ #
+ self.CommentList = []
+ self.SupArchList = []
+
+ def SetName(self, Name):
+ self.Name = Name
+ def GetName(self):
+ return self.Name
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetCommentList(self, CommentList):
+ self.CommentList = CommentList
+ def GetCommentList(self):
+ return self.CommentList
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+## ParseGuidComment
+#
+# Parse comments of a GUID entry to get usage, GUID type, variable
+# name, and help string.
+#
+def ParseGuidComment(CommentsList, InfGuidItemObj):
+ #
+ # Get/Set Usage and HelpString
+ #
+ if CommentsList is not None and len(CommentsList) != 0 :
+ CommentInsList = []
+ PreUsage = None
+ PreGuidType = None
+ PreHelpText = ''
+ BlockFlag = -1
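+ #
+ # BlockFlag tracks the comment-block state (inferred from the
+ # transitions below): -1 initial; 1/2 one or more generic comment
+ # lines pending; 3 a pending generic block followed by a usage
+ # comment; 4 the current block is complete and ready to be stored.
+ #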
+ Count = 0
+ for CommentItem in CommentsList:
+ Count = Count + 1
+ CommentItemUsage, \
+ CommentItemGuidType, \
+ CommentItemVarString, \
+ CommentItemHelpText = \
+ ParseComment(CommentItem,
+ DT.ALL_USAGE_TOKENS,
+ DT.GUID_TYPE_TOKENS,
+ [],
+ True)
+
+ if CommentItemHelpText is None:
+ CommentItemHelpText = ''
+ if Count == len(CommentsList) and CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = DT.END_OF_LINE
+
+ if Count == len(CommentsList):
+ if BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
+ BlockFlag = 4
+ else:
+ BlockFlag = 3
+ if BlockFlag == -1:
+ BlockFlag = 4
+ if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
+ if BlockFlag == -1:
+ BlockFlag = 1
+ elif BlockFlag == 1:
+ BlockFlag = 2
+ else:
+ if BlockFlag == 1 or BlockFlag == 2:
+ BlockFlag = 3
+ elif BlockFlag == -1:
+ BlockFlag = 4
+
+ #
+ # Combine two comment line if they are generic comment
+ #
+ if CommentItemUsage == CommentItemGuidType == PreUsage == PreGuidType == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
+ PreHelpText = CommentItemHelpText
+
+ if BlockFlag == 4:
+ CommentItemIns = InfGuidItemCommentContent()
+ CommentItemIns.SetUsageItem(CommentItemUsage)
+ CommentItemIns.SetGuidTypeItem(CommentItemGuidType)
+ CommentItemIns.SetVariableNameItem(CommentItemVarString)
+ if CommentItemHelpText == '' or CommentItemHelpText.endswith(DT.END_OF_LINE):
+ CommentItemHelpText = CommentItemHelpText.strip(DT.END_OF_LINE)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreGuidType = None
+ PreHelpText = ''
+
+ elif BlockFlag == 3:
+ #
+ # Add previous help string
+ #
+ CommentItemIns = InfGuidItemCommentContent()
+ CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
+ CommentItemIns.SetGuidTypeItem(DT.ITEM_UNDEFINED)
+ if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
+ PreHelpText = PreHelpText.strip(DT.END_OF_LINE)
+ CommentItemIns.SetHelpStringItem(PreHelpText)
+ CommentInsList.append(CommentItemIns)
+ #
+ # Add Current help string
+ #
+ CommentItemIns = InfGuidItemCommentContent()
+ CommentItemIns.SetUsageItem(CommentItemUsage)
+ CommentItemIns.SetGuidTypeItem(CommentItemGuidType)
+ CommentItemIns.SetVariableNameItem(CommentItemVarString)
+ if CommentItemHelpText == '' or CommentItemHelpText.endswith(DT.END_OF_LINE):
+ CommentItemHelpText = CommentItemHelpText.strip(DT.END_OF_LINE)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreGuidType = None
+ PreHelpText = ''
+
+ else:
+ PreUsage = CommentItemUsage
+ PreGuidType = CommentItemGuidType
+ PreHelpText = CommentItemHelpText
+
+ InfGuidItemObj.SetCommentList(CommentInsList)
+ else:
+ #
+ # Still need to set the USAGE/GUIDTYPE to undefined.
+ #
+ CommentItemIns = InfGuidItemCommentContent()
+ CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
+ CommentItemIns.SetGuidTypeItem(DT.ITEM_UNDEFINED)
+ InfGuidItemObj.SetCommentList([CommentItemIns])
+
+ return InfGuidItemObj
+
+## InfGuidObject
+#
+# Class object for the INF file [Guids] section data.
+#
+class InfGuidObject():
+ def __init__(self):
+ self.Guids = Sdict()
+ #
+ # Macros defined in this section should only be used in this section.
+ #
+ self.Macros = {}
+
+ def SetGuid(self, GuidList, Arch = None):
+ __SupportArchList = []
+ for ArchItem in Arch:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+
+ __SupportArchList.append(ArchItem)
+
+ for Item in GuidList:
+ #
+ # Get the comment content of this GUID
+ #
+ CommentsList = None
+ if len(Item) == 3:
+ CommentsList = Item[1]
+ CurrentLineOfItem = Item[2]
+ Item = Item[0]
+ InfGuidItemObj = InfGuidItem()
+ if len(Item) >= 1 and len(Item) <= 2:
+ #
+ # Only GuidName contained
+ #
+ if not IsValidCVariableName(Item[0]):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ if (Item[0] != ''):
+ InfGuidItemObj.SetName(Item[0])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_CNAME_MISSING,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ if len(Item) == 2:
+ #
+ # Contained CName and Feature Flag Express
+ # <statements> ::= <CName> ["|" <FeatureFlagExpress>]
+ # For GUID entry.
+ #
+ if Item[1].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ #
+ # Validate Feature Flag Express
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ InfGuidItemObj.SetFeatureFlagExp(Item[1])
+ if len(Item) != 1 and len(Item) != 2:
+ #
+ # Invalid format of GUID statement
+ #
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+
+ InfGuidItemObj = ParseGuidComment(CommentsList, InfGuidItemObj)
+ InfGuidItemObj.SetSupArchList(__SupportArchList)
+
+ #
+ # Check for duplicate GUID names, following the rules below:
+ #
+ # A GUID must not be duplicated within a [Guids] section.
+ # A GUID may appear in multiple architectural [Guids]
+ # sections. A GUID listed in an architectural [Guids]
+ # section must not be listed in the common architectural
+ # [Guids] section.
+ #
+ # NOTE: This check does not report an error for now.
+ #
+ for Item in self.Guids:
+ if Item.GetName() == InfGuidItemObj.GetName():
+ ItemSupArchList = Item.GetSupArchList()
+ for ItemArch in ItemSupArchList:
+ for GuidItemObjArch in __SupportArchList:
+ if ItemArch == GuidItemObjArch:
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+ #
+ pass
+
+ if ItemArch.upper() == 'COMMON' or GuidItemObjArch.upper() == 'COMMON':
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+ #
+ pass
+
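+ #
+ # Store the item keyed by the item object itself. InfGuidItem does
+ # not override __eq__/__hash__, so a fresh object is never found in
+ # the Sdict and always starts its own one-element list.
+ #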
+ if InfGuidItemObj in self.Guids:
+ GuidList = self.Guids[InfGuidItemObj]
+ GuidList.append(InfGuidItemObj)
+ self.Guids[InfGuidItemObj] = GuidList
+ else:
+ GuidList = []
+ GuidList.append(InfGuidItemObj)
+ self.Guids[InfGuidItemObj] = GuidList
+
+ return True
+
+ def GetGuid(self):
+ return self.Guids
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
new file mode 100755
index 00000000..2e70f08b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
@@ -0,0 +1,113 @@
+## @file
+# This file is used to define class objects of INF file header.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfHeaderObject
+'''
+
+## INF file header object
+#
+# A sample file header
+#
+# ## @file xxx.inf FileName
+# # Abstract
+# #
+# # Description
+# #
+# # Copyright
+# #
+# # License
+# #
+#
+class InfHeaderObject():
+ def __init__(self):
+ self.FileName = ''
+ self.Abstract = ''
+ self.Description = ''
+ self.Copyright = ''
+ self.License = ''
+
+ ## SetFileName
+ #
+ # @param FileName: File Name
+ #
+ def SetFileName(self, FileName):
+ if not (FileName == '' or FileName is None):
+ self.FileName = FileName
+ return True
+ else:
+ return False
+
+ ## GetFileName
+ #
+ def GetFileName(self):
+ return self.FileName
+
+ ## SetAbstract
+ #
+ # @param Abstract: Abstract
+ #
+ def SetAbstract(self, Abstract):
+ if not (Abstract == '' or Abstract is None):
+ self.Abstract = Abstract
+ return True
+ else:
+ return False
+
+ ## GetAbstract
+ #
+ def GetAbstract(self):
+ return self.Abstract
+
+ ## SetDescription
+ #
+ # @param Description: Description content
+ #
+ def SetDescription(self, Description):
+ if not (Description == '' or Description is None):
+ self.Description = Description
+ return True
+ else:
+ return False
+
+ ## GetDescription
+ #
+ def GetDescription(self):
+ return self.Description
+
+ ## SetCopyright
+ #
+ # @param Copyright: Copyright content
+ #
+ def SetCopyright(self, Copyright):
+ if not (Copyright == '' or Copyright is None):
+ self.Copyright = Copyright
+ return True
+ else:
+ return False
+
+ ## GetCopyright
+ #
+ def GetCopyright(self):
+ return self.Copyright
+
+ ## SetLicense
+ #
+ # @param License: License content
+ #
+ def SetLicense(self, License):
+ if not (License == '' or License is None):
+ self.License = License
+ return True
+ else:
+ return False
+
+ ## GetLicense
+ #
+ def GetLicense(self):
+ return self.License
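+
+#
+# Minimal usage sketch (hypothetical values): each setter rejects empty
+# or None input and reports success through its return value.
+#
+#   Header = InfHeaderObject()
+#   if not Header.SetAbstract('Sample UEFI module'):
+#       pass # '' or None leaves self.Abstract unchanged
+#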
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
new file mode 100755
index 00000000..b3433b9e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
@@ -0,0 +1,247 @@
+## @file
+# This file is used to define class objects of INF file [LibraryClasses] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfLibraryClassesObject
+'''
+
+from Logger import StringTable as ST
+from Logger import ToolError
+import Logger.Log as Logger
+from Library import GlobalData
+
+from Library.Misc import Sdict
+from Object.Parser.InfCommonObject import CurrentLine
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.ParserValidate import IsValidLibName
+
+## GetArchModuleType
+#
+# Get Arch List and ModuleType List
+#
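+# KeyList entries are (Arch, ModuleType) pairs from the section header,
+# e.g. ('IA32', 'PEIM|DXE_DRIVER') (hypothetical); empty fields default
+# to 'COMMON', and ModuleType may carry several types joined by '|'.
+#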
+def GetArchModuleType(KeyList):
+ __SupArchList = []
+ __SupModuleList = []
+
+ for (ArchItem, ModuleItem) in KeyList:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+
+ if (ModuleItem == '' or ModuleItem is None):
+ ModuleItem = 'COMMON'
+
+ if ArchItem not in __SupArchList:
+ __SupArchList.append(ArchItem)
+
+ List = ModuleItem.split('|')
+ for Entry in List:
+ if Entry not in __SupModuleList:
+ __SupModuleList.append(Entry)
+
+ return (__SupArchList, __SupModuleList)
+
+
+class InfLibraryClassItem():
+ def __init__(self, LibName='', FeatureFlagExp='', HelpString=None):
+ self.LibName = LibName
+ self.FeatureFlagExp = FeatureFlagExp
+ self.HelpString = HelpString
+ self.CurrentLine = CurrentLine()
+ self.SupArchList = []
+ self.SupModuleList = []
+ self.FileGuid = ''
+ self.Version = ''
+
+ def SetLibName(self, LibName):
+ self.LibName = LibName
+ def GetLibName(self):
+ return self.LibName
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetSupModuleList(self, SupModuleList):
+ self.SupModuleList = SupModuleList
+ def GetSupModuleList(self):
+ return self.SupModuleList
+
+ #
+ # AsBuilt (binary INF) related information
+ #
+ def SetFileGuid(self, FileGuid):
+ self.FileGuid = FileGuid
+ def GetFileGuid(self):
+ return self.FileGuid
+
+ def SetVersion(self, Version):
+ self.Version = Version
+ def GetVersion(self):
+ return self.Version
+
+## INF LibraryClass Section
+#
+# Class object for the INF file [LibraryClasses] section data.
+#
+class InfLibraryClassObject():
+ def __init__(self):
+ self.LibraryClasses = Sdict()
+ #
+ # Macros defined in this section should only be used in this section.
+ #
+ self.Macros = {}
+
+ ## SetLibraryClasses
+ #
+ # @param HelpString: It can be a common comment or contain a recommended
+ # instance.
+ #
+ def SetLibraryClasses(self, LibContent, KeyList=None):
+ #
+ # Validate Arch
+ #
+ (__SupArchList, __SupModuleList) = GetArchModuleType(KeyList)
+
+ for LibItem in LibContent:
+ LibItemObj = InfLibraryClassItem()
+ if not GlobalData.gIS_BINARY_INF:
+ HelpStringObj = LibItem[1]
+ LibItemObj.CurrentLine.SetFileName(LibItem[2][2])
+ LibItemObj.CurrentLine.SetLineNo(LibItem[2][1])
+ LibItemObj.CurrentLine.SetLineString(LibItem[2][0])
+ LibItem = LibItem[0]
+ if HelpStringObj is not None:
+ LibItemObj.SetHelpString(HelpStringObj)
+ if len(LibItem) >= 1:
+ if LibItem[0].strip() != '':
+ if IsValidLibName(LibItem[0].strip()):
+ if LibItem[0].strip() != 'NULL':
+ LibItemObj.SetLibName(LibItem[0])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_LIB_NAME_INVALID,
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LibItemObj.CurrentLine.GetLineNo(),
+ ExtraData=LibItemObj.CurrentLine.GetLineString())
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (LibItem[0]),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LibItemObj.CurrentLine.GetLineNo(),
+ ExtraData=LibItemObj.CurrentLine.GetLineString())
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_LIBRARY_SECTION_LIBNAME_MISSING,
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LibItemObj.CurrentLine.GetLineNo(),
+ ExtraData=LibItemObj.CurrentLine.GetLineString())
+ if len(LibItem) == 2:
+ if LibItem[1].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LibItemObj.CurrentLine.GetLineNo(),
+ ExtraData=LibItemObj.CurrentLine.GetLineString())
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(LibItem[1].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LibItemObj.CurrentLine.GetLineNo(),
+ ExtraData=LibItemObj.CurrentLine.GetLineString())
+ LibItemObj.SetFeatureFlagExp(LibItem[1].strip())
+
+ #
+ # Invalid strings
+ #
+ if len(LibItem) < 1 or len(LibItem) > 2:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_LIBRARY_SECTION_CONTENT_ERROR,
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LibItemObj.CurrentLine.GetLineNo(),
+ ExtraData=LibItemObj.CurrentLine.GetLineString())
+
+ LibItemObj.SetSupArchList(__SupArchList)
+ LibItemObj.SetSupModuleList(__SupModuleList)
+
+ #
+ # Check for duplicate library class keywords, following the rules below:
+ #
+ # A library class keyword must not be duplicated within a
+ # [LibraryClasses] section. Library class keywords may appear in
+ # multiple architectural and module type [LibraryClasses] sections.
+ # A library class keyword listed in an architectural or module type
+ # [LibraryClasses] section must not be listed in the common
+ # architectural or module type [LibraryClasses] section.
+ #
+ # NOTE: This check does not report an error for now, but the code is kept for future enhancement.
+ #
+# for Item in self.LibraryClasses:
+# if Item.GetLibName() == LibItemObj.GetLibName():
+# ItemSupArchList = Item.GetSupArchList()
+# ItemSupModuleList = Item.GetSupModuleList()
+# for ItemArch in ItemSupArchList:
+# for ItemModule in ItemSupModuleList:
+# for LibItemObjArch in __SupArchList:
+# for LibItemObjModule in __SupModuleList:
+# if ItemArch == LibItemObjArch and LibItemObjModule == ItemModule:
+# #
+# # ERR_INF_PARSER_ITEM_DUPLICATE
+# #
+# pass
+# if (ItemArch.upper() == 'COMMON' or LibItemObjArch.upper() == 'COMMON') \
+# and LibItemObjModule == ItemModule:
+# #
+# # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+# #
+# pass
+ else:
+ #
+ # Assume the file GUID is well formatted.
+ #
+ LibItemObj.SetFileGuid(LibItem[0])
+ LibItemObj.SetVersion(LibItem[1])
+ LibItemObj.SetSupArchList(__SupArchList)
+
+ if LibItemObj in self.LibraryClasses:
+ LibraryList = self.LibraryClasses[LibItemObj]
+ LibraryList.append(LibItemObj)
+ self.LibraryClasses[LibItemObj] = LibraryList
+ else:
+ LibraryList = []
+ LibraryList.append(LibItemObj)
+ self.LibraryClasses[LibItemObj] = LibraryList
+
+ return True
+
+ def GetLibraryClasses(self):
+ return self.LibraryClasses
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
new file mode 100755
index 00000000..1354ab45
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
@@ -0,0 +1,142 @@
+## @file
+# This file is used to define class objects of INF file miscellaneous.
+# It includes BootMode/HOB/Event and others and will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfMisc
+'''
+
+import Logger.Log as Logger
+from Logger import ToolError
+
+from Library import DataType as DT
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+from Library.Misc import Sdict
+
+##
+# BootModeObject
+#
+class InfBootModeObject():
+ def __init__(self):
+ self.SupportedBootModes = ''
+ self.HelpString = ''
+ self.Usage = ''
+
+ def SetSupportedBootModes(self, SupportedBootModes):
+ self.SupportedBootModes = SupportedBootModes
+ def GetSupportedBootModes(self):
+ return self.SupportedBootModes
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+ def SetUsage(self, Usage):
+ self.Usage = Usage
+ def GetUsage(self):
+ return self.Usage
+##
+# EventObject
+#
+class InfEventObject():
+ def __init__(self):
+ self.EventType = ''
+ self.HelpString = ''
+ self.Usage = ''
+
+ def SetEventType(self, EventType):
+ self.EventType = EventType
+
+ def GetEventType(self):
+ return self.EventType
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+ def SetUsage(self, Usage):
+ self.Usage = Usage
+ def GetUsage(self):
+ return self.Usage
+##
+# HobObject
+#
+class InfHobObject():
+ def __init__(self):
+ self.HobType = ''
+ self.Usage = ''
+ self.SupArchList = []
+ self.HelpString = ''
+
+ def SetHobType(self, HobType):
+ self.HobType = HobType
+
+ def GetHobType(self):
+ return self.HobType
+
+ def SetUsage(self, Usage):
+ self.Usage = Usage
+ def GetUsage(self):
+ return self.Usage
+
+ def SetSupArchList(self, ArchList):
+ self.SupArchList = ArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+##
+# InfSpecialCommentObject
+#
+class InfSpecialCommentObject(InfSectionCommonDef):
+ def __init__(self):
+ self.SpecialComments = Sdict()
+ InfSectionCommonDef.__init__(self)
+
+ def SetSpecialComments(self, SepcialSectionList = None, Type = ''):
+ if Type == DT.TYPE_HOB_SECTION or \
+ Type == DT.TYPE_EVENT_SECTION or \
+ Type == DT.TYPE_BOOTMODE_SECTION:
+ for Item in SepcialSectionList:
+ if Type in self.SpecialComments:
+ ObjList = self.SpecialComments[Type]
+ ObjList.append(Item)
+ self.SpecialComments[Type] = ObjList
+ else:
+ ObjList = []
+ ObjList.append(Item)
+ self.SpecialComments[Type] = ObjList
+
+ return True
+
+ def GetSpecialComments(self):
+ return self.SpecialComments
+
+
+
+## ErrorInInf
+#
+# A wrapper around Logger.Error for the INF parser.
+#
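+# LineInfo, when given, is expected to be [FileName, LineNo, LineString].
+#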
+def ErrorInInf(Message=None, ErrorCode=None, LineInfo=None, RaiseError=True):
+ if ErrorCode is None:
+ ErrorCode = ToolError.FORMAT_INVALID
+ if LineInfo is None:
+ LineInfo = ['', -1, '']
+ Logger.Error("InfParser",
+ ErrorCode,
+ Message=Message,
+ File=LineInfo[0],
+ Line=LineInfo[1],
+ ExtraData=LineInfo[2],
+ RaiseError=RaiseError)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
new file mode 100755
index 00000000..1ad04758
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
@@ -0,0 +1,181 @@
+## @file
+# This file is used to define class objects of INF file [Packages] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfPackageObject
+'''
+
+from Logger import StringTable as ST
+from Logger import ToolError
+import Logger.Log as Logger
+from Library import GlobalData
+
+from Library.Misc import Sdict
+from Library.ParserValidate import IsValidPath
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
+class InfPackageItem():
+ def __init__(self,
+ PackageName = '',
+ FeatureFlagExp = '',
+ HelpString = ''):
+ self.PackageName = PackageName
+ self.FeatureFlagExp = FeatureFlagExp
+ self.HelpString = HelpString
+ self.SupArchList = []
+
+ def SetPackageName(self, PackageName):
+ self.PackageName = PackageName
+ def GetPackageName(self):
+ return self.PackageName
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetHelpString(self, HelpString):
+ self.HelpString = HelpString
+ def GetHelpString(self):
+ return self.HelpString
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+
+## INF package section
+#
+# Class object for the INF file [Packages] section data.
+#
+class InfPackageObject():
+ def __init__(self):
+ self.Packages = Sdict()
+ #
+ # Macros defined in this section should only be used in this section.
+ #
+ self.Macros = {}
+
+ def SetPackages(self, PackageData, Arch = None):
+ IsValidFileFlag = False
+ SupArchList = []
+ for ArchItem in Arch:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+ SupArchList.append(ArchItem)
+
+ for PackageItem in PackageData:
+ PackageItemObj = InfPackageItem()
+ HelpStringObj = PackageItem[1]
+ CurrentLineOfPackItem = PackageItem[2]
+ PackageItem = PackageItem[0]
+ if HelpStringObj is not None:
+ HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
+ PackageItemObj.SetHelpString(HelpString)
+ if len(PackageItem) >= 1:
+ #
+ # Validate that the file exists and that its path format is valid.
+ #
+ if IsValidPath(PackageItem[0], ''):
+ IsValidFileFlag = True
+ elif IsValidPath(PackageItem[0], GlobalData.gINF_MODULE_DIR):
+ IsValidFileFlag = True
+ elif IsValidPath(PackageItem[0], GlobalData.gWORKSPACE):
+ IsValidFileFlag = True
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(PackageItem[0]),
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
+ ExtraData=CurrentLineOfPackItem[0])
+ return False
+ if IsValidFileFlag:
+ PackageItemObj.SetPackageName(PackageItem[0])
+ if len(PackageItem) == 2:
+ #
+ # Validate Feature Flag Express
+ #
+ if PackageItem[1].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
+ ExtraData=CurrentLineOfPackItem[0])
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(PackageItem[1].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
+ ExtraData=CurrentLineOfPackItem[0])
+
+ PackageItemObj.SetFeatureFlagExp(PackageItem[1].strip())
+
+ if len(PackageItem) > 2:
+ #
+ # Invalid format of Package statement
+ #
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR,
+ File=CurrentLineOfPackItem[2],
+ Line=CurrentLineOfPackItem[1],
+ ExtraData=CurrentLineOfPackItem[0])
+ PackageItemObj.SetSupArchList(SupArchList)
+
+ #
+ # Check for duplicate package file names, following the rules below:
+ #
+ # A package filename must not be duplicated within a [Packages]
+ # section. Package filenames may appear in multiple architectural
+ # [Packages] sections. A package filename listed in an
+ # architectural [Packages] section must not be listed in the common
+ # architectural [Packages] section.
+ #
+ # NOTE: This check does not report an error for now.
+ #
+ for Item in self.Packages:
+ if Item.GetPackageName() == PackageItemObj.GetPackageName():
+ ItemSupArchList = Item.GetSupArchList()
+ for ItemArch in ItemSupArchList:
+ for PackageItemObjArch in SupArchList:
+ if ItemArch == PackageItemObjArch:
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+ #
+ pass
+ if ItemArch.upper() == 'COMMON' or PackageItemObjArch.upper() == 'COMMON':
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+ #
+ pass
+
+ if PackageItemObj in self.Packages:
+ PackageList = self.Packages[PackageItemObj]
+ PackageList.append(PackageItemObj)
+ self.Packages[PackageItemObj] = PackageList
+ else:
+ PackageList = []
+ PackageList.append(PackageItemObj)
+ self.Packages[PackageItemObj] = PackageList
+
+ return True
+
+ def GetPackages(self, Arch = None):
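+ #
+ # NOTE: only Arch=None retrieval is implemented; any other Arch
+ # falls through and returns None.
+ #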
+ if Arch is None:
+ return self.Packages
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
new file mode 100755
index 00000000..ff6eb061
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
@@ -0,0 +1,669 @@
+## @file
+# This file is used to define class objects of INF file [Pcds] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfPcdObject
+'''
+import os
+import re
+
+from Logger import StringTable as ST
+from Logger import ToolError
+import Logger.Log as Logger
+from Library import GlobalData
+from Library import DataType as DT
+
+from Library.Misc import Sdict
+from Library.Misc import GetHelpStringByRemoveHashKey
+from Library.ParserValidate import IsValidPcdType
+from Library.ParserValidate import IsValidCVariableName
+from Library.ParserValidate import IsValidPcdValue
+from Library.ParserValidate import IsValidArch
+from Library.CommentParsing import ParseComment
+from Library.StringUtils import GetSplitValueList
+from Library.StringUtils import IsHexDigitUINT32
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Parser.InfAsBuiltProcess import GetPackageListInfo
+from Parser.DecParser import Dec
+
+from Object.Parser.InfPackagesObject import InfPackageItem
+
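+## ValidateArch
+#
+# Feature PCD sections keep a flat SupArchList, while every other PCD
+# type records its space-separated arch list in SupArchDict, keyed by
+# the PCD type string.
+#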
+def ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList):
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+
+ if PcdTypeItem1.upper() != DT.TAB_INF_FEATURE_PCD.upper():
+ ArchList = GetSplitValueList(ArchItem, ' ')
+ for ArchItemNew in ArchList:
+ if not IsValidArch(ArchItemNew):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (ArchItemNew),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LineNo,
+ ExtraData=ArchItemNew)
+ SupArchDict[PcdTypeItem1] = ArchList
+ else:
+ SupArchList.append(ArchItem)
+
+ return SupArchList, SupArchDict
+
+def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
+ CommentInsList = []
+ PreUsage = None
+ PreHelpText = ''
+ BlockFlag = -1
+ FFEHelpText = ''
+ CommentItemHelpText = ''
+ Count = 0
+ for CommentItem in CommentList:
+ Count = Count + 1
+ CommentItemUsage, CommentType, CommentString, CommentItemHelpText = ParseComment(CommentItem,
+ DT.ALL_USAGE_TOKENS,
+ {},
+ [],
+ False)
+ if CommentType and CommentString:
+ pass
+
+ if PcdTypeItem == 'FeaturePcd':
+ CommentItemUsage = DT.USAGE_ITEM_CONSUMES
+ if CommentItemHelpText is None:
+ CommentItemHelpText = ''
+
+ if Count == 1:
+ FFEHelpText = CommentItemHelpText
+ else:
+ FFEHelpText = FFEHelpText + DT.END_OF_LINE + CommentItemHelpText
+
+ if Count == len(CommentList):
+ CommentItemHelpText = FFEHelpText
+ BlockFlag = 4
+ else:
+ continue
+
+ if CommentItemHelpText is None:
+ CommentItemHelpText = ''
+ if Count == len(CommentList) and CommentItemUsage == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = DT.END_OF_LINE
+
+ if Count == len(CommentList) and (BlockFlag == 1 or BlockFlag == 2):
+ if CommentItemUsage == DT.ITEM_UNDEFINED:
+ BlockFlag = 4
+ else:
+ BlockFlag = 3
+ elif BlockFlag == -1 and Count == len(CommentList):
+ BlockFlag = 4
+
+ if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == DT.ITEM_UNDEFINED:
+ if BlockFlag == -1:
+ BlockFlag = 1
+ elif BlockFlag == 1:
+ BlockFlag = 2
+ else:
+ if BlockFlag == 1 or BlockFlag == 2:
+ BlockFlag = 3
+ elif BlockFlag == -1:
+ BlockFlag = 4
+ #
+ # Combine two comment line if they are generic comment
+ #
+ if CommentItemUsage == PreUsage == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
+
+ PreHelpText = CommentItemHelpText
+
+ if BlockFlag == 4:
+ CommentItemIns = InfPcdItemCommentContent()
+ CommentItemIns.SetUsageItem(CommentItemUsage)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreHelpText = ''
+
+ elif BlockFlag == 3:
+ #
+ # Add previous help string
+ #
+ CommentItemIns = InfPcdItemCommentContent()
+ CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
+ if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
+ PreHelpText += DT.END_OF_LINE
+ CommentItemIns.SetHelpStringItem(PreHelpText)
+ CommentInsList.append(CommentItemIns)
+ #
+ # Add Current help string
+ #
+ CommentItemIns = InfPcdItemCommentContent()
+ CommentItemIns.SetUsageItem(CommentItemUsage)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreHelpText = ''
+
+ else:
+ PreUsage = CommentItemUsage
+ PreHelpText = CommentItemHelpText
+
+ PcdItemObj.SetHelpStringList(CommentInsList)
+
+ return PcdItemObj
+
+class InfPcdItemCommentContent():
+ def __init__(self):
+ #
+ # ## SOMETIMES_CONSUMES ## HelpString
+ #
+ self.UsageItem = ''
+ #
+ # Help String
+ #
+ self.HelpStringItem = ''
+
+ def SetUsageItem(self, UsageItem):
+ self.UsageItem = UsageItem
+ def GetUsageItem(self):
+ return self.UsageItem
+
+ def SetHelpStringItem(self, HelpStringItem):
+ self.HelpStringItem = HelpStringItem
+ def GetHelpStringItem(self):
+ return self.HelpStringItem
+
+## InfPcdItem
+#
+# This class defined Pcd item used in Module files
+#
+# @param CName: Input value for CName, default is ''
+# @param Token: Input value for Token, default is ''
+# @param TokenSpaceGuidCName: Input value for TokenSpaceGuidCName, default
+# is ''
+# @param DatumType: Input value for DatumType, default is ''
+# @param MaxDatumSize: Input value for MaxDatumSize, default is ''
+# @param DefaultValue: Input value for DefaultValue, default is ''
+# @param ItemType: Input value for ItemType, default is ''
+# @param ValidUsage: Input value for ValidUsage, default is []
+# @param SkuInfoList: Input value for SkuInfoList, default is {}
+# @param SupModuleList: Input value for SupModuleList, default is []
+#
+class InfPcdItem():
+ def __init__(self):
+ self.CName = ''
+ self.Token = ''
+ self.TokenSpaceGuidCName = ''
+ self.TokenSpaceGuidValue = ''
+ self.DatumType = ''
+ self.MaxDatumSize = ''
+ self.DefaultValue = ''
+ self.Offset = ''
+ self.ValidUsage = ''
+ self.ItemType = ''
+ self.SupModuleList = []
+ self.HelpStringList = []
+ self.FeatureFlagExp = ''
+ self.SupArchList = []
+ self.PcdErrorsList = []
+
+ def SetCName(self, CName):
+ self.CName = CName
+ def GetCName(self):
+ return self.CName
+
+ def SetToken(self, Token):
+ self.Token = Token
+ def GetToken(self):
+ return self.Token
+
+ def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
+ self.TokenSpaceGuidCName = TokenSpaceGuidCName
+ def GetTokenSpaceGuidCName(self):
+ return self.TokenSpaceGuidCName
+
+ def SetTokenSpaceGuidValue(self, TokenSpaceGuidValue):
+ self.TokenSpaceGuidValue = TokenSpaceGuidValue
+ def GetTokenSpaceGuidValue(self):
+ return self.TokenSpaceGuidValue
+
+ def SetDatumType(self, DatumType):
+ self.DatumType = DatumType
+ def GetDatumType(self):
+ return self.DatumType
+
+ def SetMaxDatumSize(self, MaxDatumSize):
+ self.MaxDatumSize = MaxDatumSize
+ def GetMaxDatumSize(self):
+ return self.MaxDatumSize
+
+ def SetDefaultValue(self, DefaultValue):
+ self.DefaultValue = DefaultValue
+ def GetDefaultValue(self):
+ return self.DefaultValue
+
+ def SetPcdErrorsList(self, PcdErrorsList):
+ self.PcdErrorsList = PcdErrorsList
+ def GetPcdErrorsList(self):
+ return self.PcdErrorsList
+
+ def SetItemType(self, ItemType):
+ self.ItemType = ItemType
+ def GetItemType(self):
+ return self.ItemType
+
+ def SetSupModuleList(self, SupModuleList):
+ self.SupModuleList = SupModuleList
+ def GetSupModuleList(self):
+ return self.SupModuleList
+
+ def SetHelpStringList(self, HelpStringList):
+ self.HelpStringList = HelpStringList
+ def GetHelpStringList(self):
+ return self.HelpStringList
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetSupportArchList(self, ArchList):
+ self.SupArchList = ArchList
+ def GetSupportArchList(self):
+ return self.SupArchList
+
+ def SetOffset(self, Offset):
+ self.Offset = Offset
+ def GetOffset(self):
+ return self.Offset
+
+ def SetValidUsage(self, ValidUsage):
+ self.ValidUsage = ValidUsage
+
+ def GetValidUsage(self):
+ return self.ValidUsage
+
+## InfPcdObject
+#
+# Class object for the INF file [Pcds] section data.
+#
+class InfPcdObject():
+ def __init__(self, FileName):
+ self.Pcds = Sdict()
+ self.FileName = FileName
+
+ def SetPcds(self, PcdContent, KeysList=None, PackageInfo=None):
+
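+ #
+ # KeysList entries are (PcdType, Arch, LineNo) tuples taken from the
+ # section header, e.g. a hypothetical [FixedPcd.IA32] section.
+ #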
+ if GlobalData.gIS_BINARY_INF:
+ self.SetAsBuildPcds(PcdContent, KeysList, PackageInfo)
+ return True
+
+ #
+ # Validate Arch
+ #
+ SupArchList = []
+ SupArchDict = {}
+ PcdTypeItem = ''
+ for (PcdTypeItem1, ArchItem, LineNo) in KeysList:
+ SupArchList, SupArchDict = ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList)
+
+ #
+ # Validate PcdType
+ #
+ if (PcdTypeItem1 == '' or PcdTypeItem1 is None):
+ return False
+ else:
+ if not IsValidPcdType(PcdTypeItem1):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PCD_SECTION_TYPE_ERROR % (DT.PCD_USAGE_TYPE_LIST_OF_MODULE),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LineNo,
+ ExtraData=PcdTypeItem1)
+ return False
+
+ PcdTypeItem = PcdTypeItem1
+
+ for PcdItem in PcdContent:
+ PcdItemObj = InfPcdItem()
+ CommentList = PcdItem[1]
+ CurrentLineOfPcdItem = PcdItem[2]
+ PcdItem = PcdItem[0]
+
+ if CommentList is not None and len(CommentList) != 0:
+ PcdItemObj = ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj)
+ else:
+ CommentItemIns = InfPcdItemCommentContent()
+ CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
+ PcdItemObj.SetHelpStringList([CommentItemIns])
+
+ if len(PcdItem) >= 1 and len(PcdItem) <= 3:
+ PcdItemObj = SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj)
+
+ if len(PcdItem) >= 2 and len(PcdItem) <= 3:
+ #
+ # Contain PcdName and Value, validate value.
+ #
+ if IsValidPcdValue(PcdItem[1]) or PcdItem[1].strip() == "":
+ PcdItemObj.SetDefaultValue(PcdItem[1])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PCD_VALUE_INVALID,
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=PcdItem[1])
+
+ if len(PcdItem) == 3:
+ #
+ # Contain PcdName, value, and FeatureFlag express
+ #
+ #
+ # Validate Feature Flag Express
+ #
+ if PcdItem[2].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(PcdItem[2].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+ PcdItemObj.SetFeatureFlagExp(PcdItem[2])
+
+ if len(PcdItem) < 1 or len(PcdItem) > 3:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PCD_SECTION_CONTENT_ERROR,
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+ return False
+
+ if PcdTypeItem.upper() != DT.TAB_INF_FEATURE_PCD.upper():
+ PcdItemObj.SetSupportArchList(SupArchDict[PcdTypeItem])
+ else:
+ PcdItemObj.SetSupportArchList(SupArchList)
+
+ if (PcdTypeItem, PcdItemObj) in self.Pcds:
+ PcdsList = self.Pcds[PcdTypeItem, PcdItemObj]
+ PcdsList.append(PcdItemObj)
+ self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
+ else:
+ PcdsList = []
+ PcdsList.append(PcdItemObj)
+ self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
+
+ return True
+
+ def SetAsBuildPcds(self, PcdContent, KeysList=None, PackageInfo=None):
+ for PcdItem in PcdContent:
+ PcdItemObj = InfPcdItem()
+ CommentList = PcdItem[1]
+ CurrentLineOfPcdItem = PcdItem[2]
+ PcdItem = PcdItem[0]
+ CommentString = ''
+
+ for CommentLine in CommentList:
+ CommentString = GetHelpStringByRemoveHashKey(CommentLine)
+ CommentItemIns = InfPcdItemCommentContent()
+ CommentItemIns.SetHelpStringItem(CommentString)
+ CommentItemIns.SetUsageItem(CommentString)
+ PcdItemObj.SetHelpStringList(PcdItemObj.GetHelpStringList() + [CommentItemIns])
+ if PcdItemObj.GetValidUsage():
+ PcdItemObj.SetValidUsage(PcdItemObj.GetValidUsage() + DT.TAB_VALUE_SPLIT + CommentString)
+ else:
+ PcdItemObj.SetValidUsage(CommentString)
+
+ PcdItemObj.SetItemType(KeysList[0][0])
+ #
+ # Set PcdTokenSpaceCName and CName
+ #
+ PcdItemObj = SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj)
+ #
+ # Set Value/DatumType/OffSet/Token
+ #
+ PcdItemObj = SetValueDatumTypeMaxSizeToken(PcdItem,
+ CurrentLineOfPcdItem,
+ PcdItemObj,
+ KeysList[0][1],
+ PackageInfo)
+
+ PcdTypeItem = KeysList[0][0]
+ if (PcdTypeItem, PcdItemObj) in self.Pcds:
+ PcdsList = self.Pcds[PcdTypeItem, PcdItemObj]
+ PcdsList.append(PcdItemObj)
+ self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
+ else:
+ PcdsList = []
+ PcdsList.append(PcdItemObj)
+ self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
+
+ def GetPcds(self):
+ return self.Pcds
+
+def ParserPcdInfoInDec(String):
+ ValueList = GetSplitValueList(String, DT.TAB_VALUE_SPLIT, 3)
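+ #
+ # A DEC PCD declaration has the form (hypothetical example):
+ #   gSampleTokenSpaceGuid.PcdSample|0x0|UINT32|0x00000001
+ #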
+
+ #
+ # DatumType, Token
+ #
+ return ValueList[2], ValueList[3]
+
+def SetValueDatumTypeMaxSizeToken(PcdItem, CurrentLineOfPcdItem, PcdItemObj, Arch, PackageInfo=None):
+ #
+ # Package information has not been generated yet; parse the INF file to get it.
+ #
+ if not PackageInfo:
+ PackageInfo = []
+ InfFileName = CurrentLineOfPcdItem[2]
+ PackageInfoList = GetPackageListInfo(InfFileName, GlobalData.gWORKSPACE, -1)
+ for PackageInfoListItem in PackageInfoList:
+ PackageInfoIns = InfPackageItem()
+ PackageInfoIns.SetPackageName(PackageInfoListItem)
+ PackageInfo.append(PackageInfoIns)
+
+ PcdInfoInDecHasFound = False
+ for PackageItem in PackageInfo:
+ if PcdInfoInDecHasFound:
+ break
+ PackageName = PackageItem.PackageName
+ #
+ # Open DEC file to get information
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gWORKSPACE, PackageName)))
+
+ DecParser = None
+ if FullFileName not in GlobalData.gPackageDict:
+ DecParser = Dec(FullFileName)
+ GlobalData.gPackageDict[FullFileName] = DecParser
+ else:
+ DecParser = GlobalData.gPackageDict[FullFileName]
+
+ #
+ # Find PCD information.
+ #
+ DecPcdsDict = DecParser.GetPcdSectionObject().ValueDict
+ for Key in DecPcdsDict.keys():
+ if (Key[0] == 'PCDSDYNAMICEX' and PcdItemObj.GetItemType() == 'PcdEx') and \
+ (Key[1] == 'COMMON' or Key[1] == Arch):
+ for PcdInDec in DecPcdsDict[Key]:
+ if PcdInDec.TokenCName == PcdItemObj.CName and \
+ PcdInDec.TokenSpaceGuidCName == PcdItemObj.TokenSpaceGuidCName:
+ PcdItemObj.SetToken(PcdInDec.TokenValue)
+ PcdItemObj.SetDatumType(PcdInDec.DatumType)
+ PcdItemObj.SetSupportArchList([Arch])
+ PcdItemObj.SetDefaultValue(PcdInDec.DefaultValue)
+
+ if (Key[0] == 'PCDSPATCHABLEINMODULE' and PcdItemObj.GetItemType() == 'PatchPcd') and \
+ (Key[1] == 'COMMON' or Key[1] == Arch):
+ for PcdInDec in DecPcdsDict[Key]:
+ if PcdInDec.TokenCName == PcdItemObj.CName and \
+ PcdInDec.TokenSpaceGuidCName == PcdItemObj.TokenSpaceGuidCName:
+ PcdItemObj.SetToken(PcdInDec.TokenValue)
+ PcdItemObj.SetDatumType(PcdInDec.DatumType)
+ PcdItemObj.SetSupportArchList([Arch])
+
+ if PcdItemObj.GetDatumType() == 'VOID*':
+ if len(PcdItem) > 1:
+ PcdItemObj.SetMaxDatumSize('%s' % (len(GetSplitValueList(PcdItem[1], DT.TAB_COMMA_SPLIT))))
+
+ DecGuidsDict = DecParser.GetGuidSectionObject().ValueDict
+ for Key in DecGuidsDict.keys():
+ if Key == 'COMMON' or Key == Arch:
+ for GuidInDec in DecGuidsDict[Key]:
+ if GuidInDec.GuidCName == PcdItemObj.TokenSpaceGuidCName:
+ PcdItemObj.SetTokenSpaceGuidValue(GuidInDec.GuidString)
+
+ if PcdItemObj.GetItemType().upper() == DT.TAB_INF_PATCH_PCD.upper():
+ #
+ # Validate Value.
+ #
+ # Convert a decimal 0 value to a hex value formatted to the datum type width.
+ if PcdItem[1] == "0":
+ DatumType = PcdItemObj.GetDatumType()
+ if DatumType == "UINT8":
+ PcdItem[1] = "0x00"
+ if DatumType == "UINT16":
+ PcdItem[1] = "0x0000"
+ if DatumType == "UINT32":
+ PcdItem[1] = "0x00000000"
+ if DatumType == "UINT64":
+ PcdItem[1] = "0x0000000000000000"
+
+ if ValidatePcdValueOnDatumType(PcdItem[1], PcdItemObj.GetDatumType()):
+ PcdItemObj.SetDefaultValue(PcdItem[1])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_ASBUILD_PCD_VALUE_INVALID % ("\"" + PcdItem[1] + "\"", "\"" +
+ PcdItemObj.GetDatumType() + "\""),
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+ #
+ # validate offset
+ #
+ if PcdItemObj.GetItemType().upper() == DT.TAB_INF_PATCH_PCD.upper():
+ if not IsHexDigitUINT32(PcdItem[2]):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_ASBUILD_PCD_OFFSET_FORMAT_INVALID % ("\"" + PcdItem[2] + "\""),
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+ PcdItemObj.SetOffset(PcdItem[2])
+
+ if PcdItemObj.GetToken() == '' or PcdItemObj.GetDatumType() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_ASBUILD_PCD_DECLARITION_MISS % ("\"" + PcdItem[0] + "\""),
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+
+ return PcdItemObj
+
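+## ValidatePcdValueOnDatumType
+#
+# Accepted forms, as enforced below: BOOLEAN takes 0x00 or 0x01;
+# UINT8/16/32/64 take zero-padded hex of the exact width (e.g. 0x00FF
+# for UINT16); VOID* takes a byte array such as {0x01, 0x02}.
+#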
+def ValidatePcdValueOnDatumType(Value, Type):
+
+ Value = Value.strip()
+ #
+ # BOOLEAN type only allows 0x00 or 0x01 as its value, per the INF spec
+ #
+ if Type == 'BOOLEAN':
+ if not (Value == '0x00' or Value == '0x01'):
+ return False
+ elif Type == 'VOID*':
+ if not Value.startswith("{"):
+ return False
+ if not Value.endswith("}"):
+ return False
+ #
+ # Strip "{" at head and "}" at tail.
+ #
+ Value = Value[1:-1]
+ ValueList = GetSplitValueList(Value, DT.TAB_COMMA_SPLIT)
+
+ ReIsValidHexByte = re.compile("^0x[0-9a-f]{1,2}$", re.IGNORECASE)
+ for ValueItem in ValueList:
+ if not ReIsValidHexByte.match(ValueItem):
+ return False
+
+ elif Type == 'UINT8' or Type == 'UINT16' or Type == 'UINT32' or Type == 'UINT64':
+
+ ReIsValidUint8z = re.compile('^0[xX][a-fA-F0-9]{2}$')
+ ReIsValidUint16z = re.compile('^0[xX][a-fA-F0-9]{4}$')
+ ReIsValidUint32z = re.compile('^0[xX][a-fA-F0-9]{8}$')
+ ReIsValidUint64z = re.compile('^0[xX][a-fA-F0-9]{16}$')
+
+ if not ReIsValidUint8z.match(Value) and Type == 'UINT8':
+ return False
+ elif not ReIsValidUint16z.match(Value) and Type == 'UINT16':
+ return False
+ elif not ReIsValidUint32z.match(Value) and Type == 'UINT32':
+ return False
+ elif not ReIsValidUint64z.match(Value) and Type == 'UINT64':
+ return False
+ else:
+ #
+ # Since we assume the DEC file is always correct, we should never get here.
+ #
+ pass
+
+ return True
+
+def SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj):
+ #
+ # Only PCD Name specified
+ # <PcdName> ::= <TokenSpaceGuidCName> "." <TokenCName>
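+ # e.g. gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask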
+ #
+ PcdId = GetSplitValueList(PcdItem[0], DT.TAB_SPLIT)
+ if len(PcdId) != 2:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PCD_NAME_FORMAT_ERROR,
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=CurrentLineOfPcdItem[0])
+ else:
+ #
+ # Validate PcdTokenSpaceGuidCName
+ #
+ if not IsValidCVariableName(PcdId[0]):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PCD_CVAR_GUID,
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=PcdId[0])
+ if not IsValidCVariableName(PcdId[1]):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_PCD_CVAR_PCDCNAME,
+ File=CurrentLineOfPcdItem[2],
+ Line=CurrentLineOfPcdItem[1],
+ ExtraData=PcdId[1])
+ PcdItemObj.SetTokenSpaceGuidCName(PcdId[0])
+ PcdItemObj.SetCName(PcdId[1])
+
+ return PcdItemObj
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
new file mode 100755
index 00000000..9c852f50
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
@@ -0,0 +1,337 @@
+## @file
+# This file is used to define class objects of INF file [Ppis] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfPpiObject
+'''
+
+from Library.ParserValidate import IsValidCVariableName
+from Library.CommentParsing import ParseComment
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
+from Library.Misc import Sdict
+from Library import DataType as DT
+import Logger.Log as Logger
+from Logger import ToolError
+from Logger import StringTable as ST
+
+def ParsePpiComment(CommentsList, InfPpiItemObj):
+ PreNotify = None
+ PreUsage = None
+ PreHelpText = ''
+ BlockFlag = -1
+ CommentInsList = []
+ Count = 0
+ for CommentItem in CommentsList:
+ Count = Count + 1
+ CommentItemUsage, \
+ CommentItemNotify, \
+ CommentItemString, \
+ CommentItemHelpText = \
+ ParseComment(CommentItem,
+ DT.ALL_USAGE_TOKENS,
+ DT.PPI_NOTIFY_TOKENS,
+ ['PPI'],
+ False)
+
+ #
+ # To avoid PyLint error
+ #
+ if CommentItemString:
+ pass
+
+ if CommentItemHelpText is None:
+ CommentItemHelpText = ''
+ if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = DT.END_OF_LINE
+ #
+ # For the Last comment Item, set BlockFlag.
+ #
+ if Count == len(CommentsList):
+ if BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
+ BlockFlag = 4
+ else:
+ BlockFlag = 3
+ elif BlockFlag == -1:
+ BlockFlag = 4
+
+ #
+ # Comment USAGE and NOTIFY information are "UNDEFINED"
+ #
+ if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
+ if BlockFlag == -1:
+ BlockFlag = 1
+ elif BlockFlag == 1:
+ BlockFlag = 2
+ else:
+ if BlockFlag == 1 or BlockFlag == 2:
+ BlockFlag = 3
+ #
+ # The item has Usage or Notify information, seen here for the first time
+ #
+ elif BlockFlag == -1:
+ BlockFlag = 4
+
+ #
+ # Combine two comment line if they are generic comment
+ #
+ if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
+ #
+ # Store this information; the next line may still need the combine operation.
+ #
+ PreHelpText = CommentItemHelpText
+
+ if BlockFlag == 4:
+ CommentItemIns = InfPpiItemCommentContent()
+ CommentItemIns.SetUsage(CommentItemUsage)
+ CommentItemIns.SetNotify(CommentItemNotify)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreNotify = None
+ PreHelpText = ''
+
+ elif BlockFlag == 3:
+ #
+ # Add previous help string
+ #
+ CommentItemIns = InfPpiItemCommentContent()
+ CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
+ CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
+ if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
+ PreHelpText += DT.END_OF_LINE
+ CommentItemIns.SetHelpStringItem(PreHelpText)
+ CommentInsList.append(CommentItemIns)
+ #
+ # Add Current help string
+ #
+ CommentItemIns = InfPpiItemCommentContent()
+ CommentItemIns.SetUsage(CommentItemUsage)
+ CommentItemIns.SetNotify(CommentItemNotify)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreNotify = None
+ PreHelpText = ''
+ else:
+ PreUsage = CommentItemUsage
+ PreNotify = CommentItemNotify
+ PreHelpText = CommentItemHelpText
+
+ InfPpiItemObj.SetCommentList(CommentInsList)
+
+ return InfPpiItemObj
+
+class InfPpiItemCommentContent():
+ def __init__(self):
+ #
+ # ## SOMETIMES_CONSUMES ## HelpString
+ #
+ self.UsageItem = ''
+ #
+ # Help String
+ #
+ self.HelpStringItem = ''
+ self.Notify = ''
+ self.CommentList = []
+
+ def SetUsage(self, UsageItem):
+ self.UsageItem = UsageItem
+ def GetUsage(self):
+ return self.UsageItem
+
+ def SetNotify(self, Notify):
+ if Notify != DT.ITEM_UNDEFINED:
+ self.Notify = 'true'
+ def GetNotify(self):
+ return self.Notify
+
+ def SetHelpStringItem(self, HelpStringItem):
+ self.HelpStringItem = HelpStringItem
+ def GetHelpStringItem(self):
+ return self.HelpStringItem
+
+class InfPpiItem():
+ def __init__(self):
+ self.Name = ''
+ self.FeatureFlagExp = ''
+ self.SupArchList = []
+ self.CommentList = []
+
+ def SetName(self, Name):
+ self.Name = Name
+ def GetName(self):
+ return self.Name
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetCommentList(self, CommentList):
+ self.CommentList = CommentList
+ def GetCommentList(self):
+ return self.CommentList
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+##
+#
+#
+#
+class InfPpiObject():
+ def __init__(self):
+ self.Ppis = Sdict()
+ #
+ # Macro defined in this section should be only used in this section.
+ #
+ self.Macros = {}
+
+ def SetPpi(self, PpiList, Arch = None):
+ __SupArchList = []
+ for ArchItem in Arch:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+ __SupArchList.append(ArchItem)
+
+ for Item in PpiList:
+ #
+ # Get the comment content of this PPI
+ #
+ CommentsList = None
+ if len(Item) == 3:
+ CommentsList = Item[1]
+ CurrentLineOfItem = Item[2]
+ Item = Item[0]
+ InfPpiItemObj = InfPpiItem()
+ if len(Item) >= 1 and len(Item) <= 2:
+ #
+ # Only CName contained
+ #
+ if not IsValidCVariableName(Item[0]):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ if (Item[0] != ''):
+ InfPpiItemObj.SetName(Item[0])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_CNAME_MISSING,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ #
+ # Have FeatureFlag information
+ #
+ if len(Item) == 2:
+ #
+ # Contained CName and Feature Flag Express
+ # <statements> ::= <CName> ["|" <FeatureFlagExpress>]
+ # Item[1] should not be empty
+ #
+ if Item[1].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ #
+ # Validate Feature Flag Express for PPI entry
+ # Item[1] contain FFE information
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ InfPpiItemObj.SetFeatureFlagExp(Item[1])
+ if len(Item) != 1 and len(Item) != 2:
+ #
+ # Invalid format of Ppi statement
+ #
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+
+ #
+ # Get/Set Usage and HelpString for PPI entry
+ #
+ if CommentsList is not None and len(CommentsList) != 0:
+ InfPpiItemObj = ParsePpiComment(CommentsList, InfPpiItemObj)
+ else:
+ CommentItemIns = InfPpiItemCommentContent()
+ CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
+ CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
+ InfPpiItemObj.SetCommentList([CommentItemIns])
+
+ InfPpiItemObj.SetSupArchList(__SupArchList)
+
+ #
+ # Check for duplicate PPI names, following the rules below:
+ #
+ # A PPI must not be duplicated within a [Ppis] section.
+ # A PPI may appear in multiple architectural [Ppis]
+ # sections. A PPI listed in an architectural [Ppis]
+ # section must not be listed in the common architectural
+ # [Ppis] section.
+ #
+ # NOTE: This check does not report an error for now.
+ #
+ for Item in self.Ppis:
+ if Item.GetName() == InfPpiItemObj.GetName():
+ ItemSupArchList = Item.GetSupArchList()
+ for ItemArch in ItemSupArchList:
+ for PpiItemObjArch in __SupArchList:
+ if ItemArch == PpiItemObjArch:
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+ #
+ pass
+ if ItemArch.upper() == 'COMMON' or PpiItemObjArch.upper() == 'COMMON':
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+ #
+ pass
+
+ if (InfPpiItemObj) in self.Ppis:
+ PpiList = self.Ppis[InfPpiItemObj]
+ PpiList.append(InfPpiItemObj)
+ self.Ppis[InfPpiItemObj] = PpiList
+ else:
+ PpiList = []
+ PpiList.append(InfPpiItemObj)
+ self.Ppis[InfPpiItemObj] = PpiList
+
+ return True
+
+
+ def GetPpi(self):
+ return self.Ppis
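The [Ppis] handler above (like the [Protocols] handler that follows) accepts statements of the form `<CName> ["|" <FeatureFlagExpress>]`. A minimal standalone sketch of that split, not the UPT code itself; the regex, identifier names, and error texts are illustrative:

```python
import re

# C variable names: a letter or underscore first, then alphanumerics/underscores.
_CNAME = re.compile(r'^[_a-zA-Z][a-zA-Z0-9_]*$')

def split_guid_statement(statement):
    """Split '<CName> ["|" <FeatureFlagExpress>]' into its two fields."""
    fields = [part.strip() for part in statement.split('|', 1)]
    if not _CNAME.match(fields[0]):
        raise ValueError('invalid CName: %r' % fields[0])
    if len(fields) == 2 and fields[1] == '':
        raise ValueError('feature flag expression must not be empty')
    return fields[0], (fields[1] if len(fields) == 2 else None)

print(split_guid_statement('gEfiPeiMemoryDiscoveredPpiGuid'))
print(split_guid_statement('gMyPpiGuid | gMyTokenSpaceGuid.PcdMyFeature'))
```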
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
new file mode 100755
index 00000000..00de9272
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
@@ -0,0 +1,305 @@
+## @file
+# This file is used to define class objects of INF file [Protocols] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfProtocolObject
+'''
+
+from Library.ParserValidate import IsValidCVariableName
+from Library.CommentParsing import ParseComment
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+
+from Library.Misc import Sdict
+
+from Object.Parser.InfMisc import ErrorInInf
+
+from Library import DataType as DT
+from Logger import StringTable as ST
+
+def ParseProtocolComment(CommentsList, InfProtocolItemObj):
+ CommentInsList = []
+ PreUsage = None
+ PreNotify = None
+ PreHelpText = ''
+ BlockFlag = -1
+ Count = 0
+ for CommentItem in CommentsList:
+ Count = Count + 1
+ CommentItemUsage, \
+ CommentItemNotify, \
+ CommentItemString, \
+ CommentItemHelpText = \
+ ParseComment(CommentItem,
+ DT.PROTOCOL_USAGE_TOKENS,
+ DT.PROTOCOL_NOTIFY_TOKENS,
+ ['PROTOCOL'],
+ False)
+
+ if CommentItemString:
+ pass
+
+ if CommentItemHelpText is None:
+ CommentItemHelpText = ''
+ if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = DT.END_OF_LINE
+
+ if Count == len(CommentsList):
+ if BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
+ BlockFlag = 4
+ else:
+ BlockFlag = 3
+ elif BlockFlag == -1:
+ BlockFlag = 4
+
+ if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
+ if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
+ if BlockFlag == -1:
+ BlockFlag = 1
+ elif BlockFlag == 1:
+ BlockFlag = 2
+ else:
+ if BlockFlag == 1 or BlockFlag == 2:
+ BlockFlag = 3
+ elif BlockFlag == -1:
+ BlockFlag = 4
+
+ #
+ # Combine two comment line if they are generic comment
+ #
+ if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
+ CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
+
+ PreHelpText = CommentItemHelpText
+
+ if BlockFlag == 4:
+ CommentItemIns = InfProtocolItemCommentContent()
+ CommentItemIns.SetUsageItem(CommentItemUsage)
+ CommentItemIns.SetNotify(CommentItemNotify)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreNotify = None
+ PreHelpText = ''
+
+ elif BlockFlag == 3:
+ #
+ # Add previous help string
+ #
+ CommentItemIns = InfProtocolItemCommentContent()
+ CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
+ CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
+ if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
+ PreHelpText += DT.END_OF_LINE
+ CommentItemIns.SetHelpStringItem(PreHelpText)
+ CommentInsList.append(CommentItemIns)
+ #
+ # Add Current help string
+ #
+ CommentItemIns = InfProtocolItemCommentContent()
+ CommentItemIns.SetUsageItem(CommentItemUsage)
+ CommentItemIns.SetNotify(CommentItemNotify)
+ CommentItemIns.SetHelpStringItem(CommentItemHelpText)
+ CommentInsList.append(CommentItemIns)
+
+ BlockFlag = -1
+ PreUsage = None
+ PreNotify = None
+ PreHelpText = ''
+
+ else:
+ PreUsage = CommentItemUsage
+ PreNotify = CommentItemNotify
+ PreHelpText = CommentItemHelpText
+
+ InfProtocolItemObj.SetCommentList(CommentInsList)
+
+ return InfProtocolItemObj
+
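ParseProtocolComment above drives a small BlockFlag state machine that merges runs of generic comment lines and closes a block whenever a usage/notify-tagged line appears. A loose, self-contained analogue of that grouping, with illustrative token names standing in for the real DT constants:

```python
# Longest tokens first, so SOMETIMES_* wins over the bare token it contains.
USAGE_TOKENS = ('SOMETIMES_PRODUCES', 'SOMETIMES_CONSUMES', 'PRODUCES', 'CONSUMES')

def group_comments(lines):
    """Merge consecutive generic lines; a tagged line ends the run."""
    blocks, generic = [], []
    for line in lines:
        token = next((t for t in USAGE_TOKENS if t in line), None)
        if token is None:
            generic.append(line)              # plain help text, keep accumulating
            continue
        if generic:                           # flush the pending generic block
            blocks.append(('UNDEFINED', '\n'.join(generic)))
            generic = []
        blocks.append((token, line))
    if generic:
        blocks.append(('UNDEFINED', '\n'.join(generic)))
    return blocks

print(group_comments(['Module-wide help text,', 'continued on a second line.',
                      '## CONSUMES ## used at boot time']))
```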
+class InfProtocolItemCommentContent():
+ def __init__(self):
+ #
+ # ## SOMETIMES_CONSUMES ## HelpString
+ #
+ self.UsageItem = ''
+ #
+ # Help String
+ #
+ self.HelpStringItem = ''
+ self.Notify = ''
+ self.CommentList = []
+
+ def SetUsageItem(self, UsageItem):
+ self.UsageItem = UsageItem
+ def GetUsageItem(self):
+ return self.UsageItem
+
+ def SetNotify(self, Notify):
+ if Notify != DT.ITEM_UNDEFINED:
+ self.Notify = 'true'
+ def GetNotify(self):
+ return self.Notify
+
+ def SetHelpStringItem(self, HelpStringItem):
+ self.HelpStringItem = HelpStringItem
+ def GetHelpStringItem(self):
+ return self.HelpStringItem
+
+class InfProtocolItem():
+ def __init__(self):
+ self.Name = ''
+ self.FeatureFlagExp = ''
+ self.SupArchList = []
+ self.CommentList = []
+
+ def SetName(self, Name):
+ self.Name = Name
+ def GetName(self):
+ return self.Name
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+ def SetCommentList(self, CommentList):
+ self.CommentList = CommentList
+ def GetCommentList(self):
+ return self.CommentList
+
+## InfProtocolObject
+#
+# Object to handle the [Protocols] section of an INF file.
+#
+class InfProtocolObject():
+ def __init__(self):
+ self.Protocols = Sdict()
+ #
+ # Macro defined in this section should be only used in this section.
+ #
+ self.Macros = {}
+
+ def SetProtocol(self, ProtocolContent, Arch = None):
+ __SupArchList = []
+ for ArchItem in Arch:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+ __SupArchList.append(ArchItem)
+
+ for Item in ProtocolContent:
+ #
+ # Get Comment content of this protocol
+ #
+ CommentsList = None
+ if len(Item) == 3:
+ CommentsList = Item[1]
+ CurrentLineOfItem = Item[2]
+ LineInfo = (CurrentLineOfItem[2], CurrentLineOfItem[1], CurrentLineOfItem[0])
+ Item = Item[0]
+ InfProtocolItemObj = InfProtocolItem()
+ if len(Item) >= 1 and len(Item) <= 2:
+ #
+ # Only CName contained
+ #
+ if not IsValidCVariableName(Item[0]):
+ ErrorInInf(ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
+ LineInfo=LineInfo)
+ if (Item[0] != ''):
+ InfProtocolItemObj.SetName(Item[0])
+ else:
+ ErrorInInf(ST.ERR_INF_PARSER_CNAME_MISSING,
+ LineInfo=LineInfo)
+ if len(Item) == 2:
+ #
+ # Contained CName and Feature Flag Express
+ # <statements> ::= <CName> ["|"
+ # <FeatureFlagExpress>]
+ # For Protocol Object
+ #
+ if Item[1].strip() == '':
+ ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ LineInfo=LineInfo)
+ #
+ # Validate Feature Flag Express for Item[1]
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
+ if not FeatureFlagRtv[0]:
+ ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ LineInfo=LineInfo)
+ InfProtocolItemObj.SetFeatureFlagExp(Item[1])
+
+ if len(Item) < 1 or len(Item) > 2:
+ #
+ # Invalid format of Protocols statement
+ #
+ ErrorInInf(ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
+ LineInfo=LineInfo)
+
+ #
+ # Get/Set Usage and HelpString for Protocol entry
+ #
+ if CommentsList is not None and len(CommentsList) != 0:
+ InfProtocolItemObj = ParseProtocolComment(CommentsList, InfProtocolItemObj)
+ else:
+ CommentItemIns = InfProtocolItemCommentContent()
+ CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
+ CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
+ InfProtocolItemObj.SetCommentList([CommentItemIns])
+
+ InfProtocolItemObj.SetSupArchList(__SupArchList)
+
+ #
+ # Check for duplicate protocol names according to the rules below:
+ #
+ # A protocol must not be duplicated within a [Protocols] section.
+ # A protocol may appear in multiple architectural [Protocols]
+ # sections. A protocol listed in an architectural [Protocols]
+ # section must not be listed in the common architectural
+ # [Protocols] section.
+ #
+ # NOTE: This check does not report an error for now.
+ #
+ for Item in self.Protocols:
+ if Item.GetName() == InfProtocolItemObj.GetName():
+ ItemSupArchList = Item.GetSupArchList()
+ for ItemArch in ItemSupArchList:
+ for ProtocolItemObjArch in __SupArchList:
+ if ItemArch == ProtocolItemObjArch:
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE
+ #
+ pass
+ if ItemArch.upper() == 'COMMON' or ProtocolItemObjArch.upper() == 'COMMON':
+ #
+ # ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
+ #
+ pass
+
+ if (InfProtocolItemObj) in self.Protocols:
+ ProtocolList = self.Protocols[InfProtocolItemObj]
+ ProtocolList.append(InfProtocolItemObj)
+ self.Protocols[InfProtocolItemObj] = ProtocolList
+ else:
+ ProtocolList = []
+ ProtocolList.append(InfProtocolItemObj)
+ self.Protocols[InfProtocolItemObj] = ProtocolList
+
+ return True
+
+ def GetProtocol(self):
+ return self.Protocols
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
new file mode 100755
index 00000000..de95c2f1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
@@ -0,0 +1,233 @@
+## @file
+# This file is used to define class objects of INF file [Sources] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfSourcesObject
+'''
+
+import os
+
+from Logger import StringTable as ST
+from Logger import ToolError
+import Logger.Log as Logger
+from Library import GlobalData
+
+from Library.Misc import Sdict
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+from Library.Misc import ValidFile
+from Library.ParserValidate import IsValidFamily
+from Library.ParserValidate import IsValidPath
+
+## __GenSourceInstance
+#
+#
+def GenSourceInstance(Item, CurrentLineOfItem, ItemObj):
+
+ IsValidFileFlag = False
+
+ if len(Item) < 6 and len(Item) >= 1:
+ #
+ # File | Family | TagName | ToolCode | FeatureFlagExpr
+ #
+ if len(Item) == 5:
+ #
+ # Validate Feature Flag Express
+ #
+ if Item[4].strip() == '':
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ #
+ # Validate FFE
+ #
+ FeatureFlagRtv = IsValidFeatureFlagExp(Item[4].strip())
+ if not FeatureFlagRtv[0]:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ ItemObj.SetFeatureFlagExp(Item[4])
+ if len(Item) >= 4:
+ if Item[3].strip() == '':
+ ItemObj.SetToolCode(Item[3])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_TOOLCODE_NOT_PERMITTED%(Item[3]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ if len(Item) >= 3:
+ if Item[2].strip() == '':
+ ItemObj.SetTagName(Item[2])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED%(Item[2]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ if len(Item) >= 2:
+ if IsValidFamily(Item[1].strip()):
+ #
+ # To align with the UDP specification: "*" is not permitted there, so normalize it to an empty family.
+ #
+ if Item[1].strip() == "*":
+ Item[1] = ""
+ ItemObj.SetFamily(Item[1])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_SOURCE_SECTION_FAMILY_INVALID%(Item[1]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ if len(Item) >= 1:
+ #
+ # Validate that the file name exists.
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR, Item[0])))
+ if not (ValidFile(FullFileName) or ValidFile(Item[0])):
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_FILELIST_EXIST%(Item[0]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+
+ #
+ # Validate file existence/format.
+ #
+
+ if IsValidPath(Item[0], GlobalData.gINF_MODULE_DIR):
+ IsValidFileFlag = True
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Item[0]),
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+ return False
+ if IsValidFileFlag:
+ ItemObj.SetSourceFileName(Item[0])
+ else:
+ Logger.Error("InfParser",
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR,
+ File=CurrentLineOfItem[2],
+ Line=CurrentLineOfItem[1],
+ ExtraData=CurrentLineOfItem[0])
+
+ return ItemObj
+
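GenSourceInstance above peels the optional fields of a [Sources] entry from right to left: `File | Family | TagName | ToolCode | FeatureFlagExpr`, with the UDP rules requiring TagName and ToolCode to be empty and normalizing a `*` family to the empty string. A standalone sketch of that field layout; file-existence and FFE syntax checks are omitted and the file name is purely illustrative:

```python
def parse_source_entry(entry):
    """Split 'File|Family|TagName|ToolCode|FeatureFlagExpr' per the UDP rules."""
    fields = [field.strip() for field in entry.split('|')]
    if not 1 <= len(fields) <= 5:
        raise ValueError('a [Sources] entry has one to five fields')
    fields += [''] * (5 - len(fields))        # pad the optional trailing fields
    filename, family, tagname, toolcode, ffe = fields
    if tagname or toolcode:
        raise ValueError('TagName/ToolCode are not permitted in a UDP package')
    if family == '*':
        family = ''                           # '*' is normalized away
    return {'File': filename, 'Family': family, 'FeatureFlagExp': ffe}

print(parse_source_entry('X64/CpuHelpers.nasm | MSFT'))
```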
+## InfSourcesItemObject()
+#
+#
+class InfSourcesItemObject():
+ def __init__(self, \
+ SourceFileName = '', \
+ Family = '', \
+ TagName = '', \
+ ToolCode = '', \
+ FeatureFlagExp = ''):
+ self.SourceFileName = SourceFileName
+ self.Family = Family
+ self.TagName = TagName
+ self.ToolCode = ToolCode
+ self.FeatureFlagExp = FeatureFlagExp
+ self.HeaderString = ''
+ self.TailString = ''
+ self.SupArchList = []
+
+ def SetSourceFileName(self, SourceFilename):
+ self.SourceFileName = SourceFilename
+ def GetSourceFileName(self):
+ return self.SourceFileName
+
+ def SetFamily(self, Family):
+ self.Family = Family
+ def GetFamily(self):
+ return self.Family
+
+ def SetTagName(self, TagName):
+ self.TagName = TagName
+ def GetTagName(self):
+ return self.TagName
+
+ def SetToolCode(self, ToolCode):
+ self.ToolCode = ToolCode
+ def GetToolCode(self):
+ return self.ToolCode
+
+ def SetFeatureFlagExp(self, FeatureFlagExp):
+ self.FeatureFlagExp = FeatureFlagExp
+ def GetFeatureFlagExp(self):
+ return self.FeatureFlagExp
+
+ def SetHeaderString(self, HeaderString):
+ self.HeaderString = HeaderString
+ def GetHeaderString(self):
+ return self.HeaderString
+
+ def SetTailString(self, TailString):
+ self.TailString = TailString
+ def GetTailString(self):
+ return self.TailString
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+## InfSourcesObject
+#
+# Object to handle the [Sources] section of an INF file.
+#
+class InfSourcesObject(InfSectionCommonDef):
+ def __init__(self):
+ self.Sources = Sdict()
+ InfSectionCommonDef.__init__(self)
+
+ def SetSources(self, SourceList, Arch = None):
+ __SupArchList = []
+ for ArchItem in Arch:
+ #
+ # Validate Arch
+ #
+ if (ArchItem == '' or ArchItem is None):
+ ArchItem = 'COMMON'
+ __SupArchList.append(ArchItem)
+
+ for Item in SourceList:
+ ItemObj = InfSourcesItemObject()
+ CurrentLineOfItem = Item[2]
+ Item = Item[0]
+
+ ItemObj = GenSourceInstance(Item, CurrentLineOfItem, ItemObj)
+
+ ItemObj.SetSupArchList(__SupArchList)
+
+ if (ItemObj) in self.Sources:
+ SourceContent = self.Sources[ItemObj]
+ SourceContent.append(ItemObj)
+ self.Sources[ItemObj] = SourceContent
+ else:
+ SourceContent = []
+ SourceContent.append(ItemObj)
+ self.Sources[ItemObj] = SourceContent
+
+ return True
+
+ def GetSources(self):
+ return self.Sources
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py
new file mode 100755
index 00000000..c7593bfa
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py
@@ -0,0 +1,127 @@
+## @file
+# This file is used to define class objects of the INF file [UserExtensions] section.
+# It will be consumed by InfParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+InfUserExtensionsObject
+'''
+
+from Logger import StringTable as ST
+from Logger import ToolError
+import Logger.Log as Logger
+from Library import GlobalData
+
+from Library.Misc import Sdict
+
+class InfUserExtensionItem():
+ def __init__(self,
+ Content = '',
+ UserId = '',
+ IdString = ''):
+ self.Content = Content
+ self.UserId = UserId
+ self.IdString = IdString
+ self.SupArchList = []
+
+ def SetContent(self, Content):
+ self.Content = Content
+ def GetContent(self):
+ return self.Content
+
+ def SetUserId(self, UserId):
+ self.UserId = UserId
+ def GetUserId(self):
+ return self.UserId
+
+ def SetIdString(self, IdString):
+ self.IdString = IdString
+ def GetIdString(self):
+ return self.IdString
+
+ def SetSupArchList(self, SupArchList):
+ self.SupArchList = SupArchList
+ def GetSupArchList(self):
+ return self.SupArchList
+
+## InfUserExtensionObject
+#
+# Object to handle the [UserExtensions] section of an INF file.
+#
+class InfUserExtensionObject():
+ def __init__(self):
+ self.UserExtension = Sdict()
+
+ def SetUserExtension(self, UserExtensionCont, IdContent=None, LineNo=None):
+ if not UserExtensionCont or UserExtensionCont == '':
+ return True
+ #
+ # IdContent is a list containing UserId and IdString.
+ # The general section header parser is called for this; if IdString/UserId
+ # is not defined, it returns 'COMMON'.
+ #
+ for IdContentItem in IdContent:
+ InfUserExtensionItemObj = InfUserExtensionItem()
+ if IdContentItem[0] == 'COMMON':
+ UserId = ''
+ else:
+ UserId = IdContentItem[0]
+
+ if IdContentItem[1] == 'COMMON':
+ IdString = ''
+ else:
+ IdString = IdContentItem[1]
+
+ #
+ # Fill UserExtensionObj members.
+ #
+ InfUserExtensionItemObj.SetUserId(UserId)
+ InfUserExtensionItemObj.SetIdString(IdString)
+ InfUserExtensionItemObj.SetContent(UserExtensionCont)
+ InfUserExtensionItemObj.SetSupArchList(IdContentItem[2])
+
+# for CheckItem in self.UserExtension:
+# if IdContentItem[0] == CheckItem[0] and IdContentItem[1] == CheckItem[1]:
+# if IdContentItem[2].upper() == 'COMMON' or CheckItem[2].upper() == 'COMMON':
+# #
+# # For COMMON ARCH type, do special check.
+# #
+# Logger.Error('InfParser',
+# ToolError.FORMAT_INVALID,
+# ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR%\
+# (IdContentItem[0] + '.' + IdContentItem[1] + '.' + IdContentItem[2]),
+# File=GlobalData.gINF_MODULE_NAME,
+# Line=LineNo,
+# ExtraData=None)
+
+ if IdContentItem in self.UserExtension:
+ #
+ # Each UserExtensions section header must have a unique set
+ # of UserId, IdString and Arch values.
+ # This means that the same UserId can be used in more than one
+ # section header, provided the IdString or Arch values are
+ # different. The same IdString values can be used in more than
+ # one section header if the UserId or Arch values are
+ # different. The same UserId and the same IdString can be used
+ # in a section header if the Arch values are different in each
+ # of the section headers.
+ #
+ Logger.Error('InfParser',
+ ToolError.FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR%\
+ (IdContentItem[0] + '.' + IdContentItem[1] + '.' + IdContentItem[2]),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=LineNo,
+ ExtraData=None)
+ else:
+ UserExtensionList = []
+ UserExtensionList.append(InfUserExtensionItemObj)
+ self.UserExtension[IdContentItem] = UserExtensionList
+
+ return True
+
+ def GetUserExtension(self):
+ return self.UserExtension
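The duplicate check above keys each [UserExtensions] header on the (UserId, IdString, Arch) triple: repeating all three is an error, while changing any one of them is allowed. A minimal sketch of just that rule:

```python
def check_unique_headers(headers):
    """Raise if a (UserId, IdString, Arch) triple appears twice."""
    seen = set()
    for user_id, id_string, arch in headers:
        key = (user_id, id_string, arch.upper())
        if key in seen:
            raise ValueError('duplicate UserExtensions header: %s.%s.%s' % key)
        seen.add(key)

# Same UserId and IdString is fine as long as the Arch values differ.
check_unique_headers([('TianoCore', '"ExtraFiles"', 'IA32'),
                      ('TianoCore', '"ExtraFiles"', 'X64')])
```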
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/__init__.py
new file mode 100644
index 00000000..5174f309
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/Parser/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Parser' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+PARSER
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/__init__.py
new file mode 100644
index 00000000..2e53a314
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Object/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Object' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Object
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParser.py
new file mode 100755
index 00000000..f6b8545b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParser.py
@@ -0,0 +1,1091 @@
+## @file
+# This file is used to parse DEC files. It will be consumed by DecParser.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+'''
+DecParser
+'''
+## Import modules
+#
+import Logger.Log as Logger
+from Logger.ToolError import FILE_PARSE_FAILURE
+from Logger.ToolError import FILE_OPEN_FAILURE
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+
+import Library.DataType as DT
+from Library.ParserValidate import IsValidToken
+from Library.ParserValidate import IsValidPath
+from Library.ParserValidate import IsValidCFormatGuid
+from Library.ParserValidate import IsValidIdString
+from Library.ParserValidate import IsValidUserId
+from Library.ParserValidate import IsValidArch
+from Library.ParserValidate import IsValidWord
+from Library.ParserValidate import IsValidDecVersionVal
+from Parser.DecParserMisc import TOOL_NAME
+from Parser.DecParserMisc import CleanString
+from Parser.DecParserMisc import IsValidPcdDatum
+from Parser.DecParserMisc import ParserHelper
+from Parser.DecParserMisc import StripRoot
+from Parser.DecParserMisc import VERSION_PATTERN
+from Parser.DecParserMisc import CVAR_PATTERN
+from Parser.DecParserMisc import PCD_TOKEN_PATTERN
+from Parser.DecParserMisc import MACRO_PATTERN
+from Parser.DecParserMisc import FileContent
+from Object.Parser.DecObject import _DecComments
+from Object.Parser.DecObject import DecDefineObject
+from Object.Parser.DecObject import DecDefineItemObject
+from Object.Parser.DecObject import DecIncludeObject
+from Object.Parser.DecObject import DecIncludeItemObject
+from Object.Parser.DecObject import DecLibraryclassObject
+from Object.Parser.DecObject import DecLibraryclassItemObject
+from Object.Parser.DecObject import DecGuidObject
+from Object.Parser.DecObject import DecPpiObject
+from Object.Parser.DecObject import DecProtocolObject
+from Object.Parser.DecObject import DecGuidItemObject
+from Object.Parser.DecObject import DecUserExtensionObject
+from Object.Parser.DecObject import DecUserExtensionItemObject
+from Object.Parser.DecObject import DecPcdObject
+from Object.Parser.DecObject import DecPcdItemObject
+from Library.Misc import GuidStructureStringToGuidString
+from Library.Misc import CheckGuidRegFormat
+from Library.StringUtils import ReplaceMacro
+from Library.StringUtils import GetSplitValueList
+from Library.StringUtils import gMACRO_PATTERN
+from Library.StringUtils import ConvertSpecialChar
+from Library.CommentParsing import ParsePcdErrorCode
+
+##
+# _DecBase class for parsing
+#
+class _DecBase:
+ def __init__(self, RawData):
+ self._RawData = RawData
+ self._ItemDict = {}
+ self._LocalMacro = {}
+ #
+ # Data parsed by 'self' is saved to this object
+ #
+ self.ItemObject = None
+
+ def GetDataObject(self):
+ return self.ItemObject
+
+ def GetLocalMacro(self):
+ return self._LocalMacro
+
+ ## BlockStart
+ #
+ # Called if a new section starts
+ #
+ def BlockStart(self):
+ self._LocalMacro = {}
+
+ ## _CheckReDefine
+ #
+ # @param Key: Key to be checked for multiple definitions
+ # @param Scope: Format: [[SectionName, Arch], ...].
+ # If Scope is None, the raw data's current scope is used
+ #
+ def _CheckReDefine(self, Key, Scope = None):
+ if not Scope:
+ Scope = self._RawData.CurrentScope
+ return
+
+ SecArch = []
+ #
+ # Copy Scope into SecArch so that changes to Scope outside do not affect the stored value
+ #
+ SecArch[0:1] = Scope[:]
+ if Key not in self._ItemDict:
+ self._ItemDict[Key] = [[SecArch, self._RawData.LineIndex]]
+ return
+
+ for Value in self._ItemDict[Key]:
+ for SubValue in Scope:
+ #
+ # If current is common section
+ #
+ if SubValue[-1] == 'COMMON':
+ for Other in Value[0]:
+ # Key in common cannot be redefined in other arches
+ # [:-1] means stripping arch info
+ if Other[:-1] == SubValue[:-1]:
+ self._LoggerError(ST.ERR_DECPARSE_REDEFINE % (Key, Value[1]))
+ return
+ continue
+ CommonScope = []
+ CommonScope[0:1] = SubValue
+ CommonScope[-1] = 'COMMON'
+ #
+ # The key cannot be redefined if it is already defined in COMMON or in the same arch
+ #
+ if SubValue in Value[0] or CommonScope in Value[0]:
+ self._LoggerError(ST.ERR_DECPARSE_REDEFINE % (Key, Value[1]))
+ return
+ self._ItemDict[Key].append([SecArch, self._RawData.LineIndex])
+
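The scope rule _CheckReDefine encodes is: a key already defined for COMMON conflicts with any later arch-specific definition (and vice versa), while distinct specific arches may coexist. A compact sketch of just that decision, separated from the parser's scope bookkeeping:

```python
def may_redefine(existing_arches, new_arch):
    """True if defining the key again for new_arch would be legal."""
    new_arch = new_arch.upper()
    for arch in (a.upper() for a in existing_arches):
        # An exact repeat, or any overlap with COMMON, is a conflict.
        if arch == new_arch or 'COMMON' in (arch, new_arch):
            return False
    return True

print(may_redefine(['IA32'], 'X64'))      # True: disjoint arches
print(may_redefine(['COMMON'], 'X64'))    # False: COMMON covers every arch
```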
+ ## CheckRequiredFields
+ # Some sections need to check whether certain fields exist, the define section for example.
+ # Derived classes can re-implement this; the top parser calls it after all parsing is done.
+ #
+ def CheckRequiredFields(self):
+ if self._RawData:
+ pass
+ return True
+
+ ## IsItemRequired
+ # In DEC spec, sections must have at least one statement except user
+ # extension.
+ # For example: "[guids" [<attribs>] "]" <EOL> <statements>+
+ # Subclasses can override this method to indicate whether a statement is required.
+ #
+ def _IsStatementRequired(self):
+ if self._RawData:
+ pass
+ return False
+
+ def _LoggerError(self, ErrorString):
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ Line = self._RawData.LineIndex,
+ ExtraData=ErrorString + ST.ERR_DECPARSE_LINE % self._RawData.CurrentLine)
+
+ def _ReplaceMacro(self, String):
+ if gMACRO_PATTERN.findall(String):
+ String = ReplaceMacro(String, self._LocalMacro, False,
+ FileName = self._RawData.Filename,
+ Line = ['', self._RawData.LineIndex])
+ String = ReplaceMacro(String, self._RawData.Macros, False,
+ FileName = self._RawData.Filename,
+ Line = ['', self._RawData.LineIndex])
+ MacroUsed = gMACRO_PATTERN.findall(String)
+ if MacroUsed:
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE,
+ File=self._RawData.Filename,
+ Line = self._RawData.LineIndex,
+ ExtraData = ST.ERR_DECPARSE_MACRO_RESOLVE % (str(MacroUsed), String))
+ return String
+
+ def _MacroParser(self, String):
+ TokenList = GetSplitValueList(String, ' ', 1)
+ if len(TokenList) < 2 or TokenList[1] == '':
+ self._LoggerError(ST.ERR_DECPARSE_MACRO_PAIR)
+
+ TokenList = GetSplitValueList(TokenList[1], DT.TAB_EQUAL_SPLIT, 1)
+ if TokenList[0] == '':
+ self._LoggerError(ST.ERR_DECPARSE_MACRO_NAME)
+ elif not IsValidToken(MACRO_PATTERN, TokenList[0]):
+ self._LoggerError(ST.ERR_DECPARSE_MACRO_NAME_UPPER % TokenList[0])
+
+ if len(TokenList) == 1:
+ self._LocalMacro[TokenList[0]] = ''
+ else:
+ self._LocalMacro[TokenList[0]] = self._ReplaceMacro(TokenList[1])
+
+ ## _ParseItem
+ #
+ # Parse the specified item; this function must be overridden by subclasses
+ #
+ def _ParseItem(self):
+ if self._RawData:
+ pass
+ #
+ # Should never be called
+ #
+ return None
+
+
+ ## _TailCommentStrategy
+ #
+ # This function can be overridden to parse tail comments;
+ # by default it does not consume any lines
+ #
+ # @param Comment: Comment of current line
+ #
+ def _TailCommentStrategy(self, Comment):
+ if Comment:
+ pass
+ if self._RawData:
+ pass
+ return False
+
+ ## _StopCurrentParsing
+ #
+ # Called in Parse to decide whether the current parsing should stop when
+ # a certain keyword is encountered.
+ # The default is to stop at section start/end markers
+ #
+ # @param Line: Current line
+ #
+ def _StopCurrentParsing(self, Line):
+ if self._RawData:
+ pass
+ return Line[0] == DT.TAB_SECTION_START and Line[-1] == DT.TAB_SECTION_END
+
+ ## _TryBackSlash
+ #
+ # Split comment and DEC content, concatenating lines whose last character is '\'
+ #
+ # @param ProcessedLine: The processed line
+ # @param ProcessedComments: The processed comment lines
+ #
+ def _TryBackSlash(self, ProcessedLine, ProcessedComments):
+ CatLine = ''
+ Comment = ''
+ Line = ProcessedLine
+ CommentList = ProcessedComments
+ while not self._RawData.IsEndOfFile():
+ if Line == '':
+ self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
+ break
+
+ if Comment:
+ CommentList.append((Comment, self._RawData.LineIndex))
+ if Line[-1] != DT.TAB_SLASH:
+ CatLine += Line
+ break
+ elif len(Line) < 2 or Line[-2] != ' ':
+ self._LoggerError(ST.ERR_DECPARSE_BACKSLASH)
+ else:
+ CatLine += Line[:-1]
+ Line, Comment = CleanString(self._RawData.GetNextLine())
+ #
+ # Reached the end of content
+ #
+ if self._RawData.IsEndOfFile():
+ if not CatLine:
+ if ProcessedLine[-1] == DT.TAB_SLASH:
+ self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
+ CatLine = ProcessedLine
+ else:
+ if not Line or Line[-1] == DT.TAB_SLASH:
+ self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
+ CatLine += Line
+
+ #
+ # All MACRO values defined by the DEFINE statements in any section
+ # (except [Userextensions] sections for Intel) of the INF or DEC file
+ # must be expanded before processing of the file.
+ #
+ __IsReplaceMacro = True
+ Header = self._RawData.CurrentScope[0] if self._RawData.CurrentScope else None
+ if Header and len(Header) > 2:
+ if Header[0].upper() == 'USEREXTENSIONS' and not (Header[1] == 'TianoCore' and Header[2] == '"ExtraFiles"'):
+ __IsReplaceMacro = False
+ if __IsReplaceMacro:
+ self._RawData.CurrentLine = self._ReplaceMacro(CatLine)
+ else:
+ self._RawData.CurrentLine = CatLine
+
+ return CatLine, CommentList
+
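_TryBackSlash folds physical lines that end in ' \' into one logical line, rejecting a backslash not preceded by a space and a continuation that runs into end of file. A self-contained sketch of the same joining rule:

```python
def join_continuations(lines):
    """Concatenate physical lines that end in ' \\' into logical lines."""
    logical, pending = [], ''
    for line in lines:
        line = line.rstrip()
        if line.endswith('\\'):
            if not line[:-1].endswith(' '):
                raise ValueError('backslash must be preceded by a space')
            pending += line[:-1]
        else:
            logical.append(pending + line)
            pending = ''
    if pending:
        raise ValueError('file ends in the middle of a continuation')
    return logical

print(join_continuations(['gTokenSpaceGuid.PcdFoo| \\', '  TRUE|BOOLEAN|0x1']))
```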
+ ## Parse
+ # This is a template method that calls other member functions which may be
+ # overridden by subclasses. It is responsible for reading the file
+ # line by line and calling other member functions to parse it. This function
+ # should not be re-implemented by subclasses.
+ #
+ def Parse(self):
+ HeadComments = []
+ TailComments = []
+
+ #======================================================================
+ # CurComments may point to HeadComments or TailComments
+ #======================================================================
+ CurComments = HeadComments
+ CurObj = None
+ ItemNum = 0
+ FromBuf = False
+
+ #======================================================================
+ # Used to report error information if empty section found
+ #======================================================================
+ Index = self._RawData.LineIndex
+ LineStr = self._RawData.CurrentLine
+ while not self._RawData.IsEndOfFile() or self._RawData.NextLine:
+ if self._RawData.NextLine:
+ #==============================================================
+ # Have processed line in buffer
+ #==============================================================
+ Line = self._RawData.NextLine
+ HeadComments.extend(self._RawData.HeadComment)
+ TailComments.extend(self._RawData.TailComment)
+ self._RawData.ResetNext()
+ Comment = ''
+ FromBuf = True
+ else:
+ #==============================================================
+ # No line in buffer, read next line
+ #==============================================================
+ Line, Comment = CleanString(self._RawData.GetNextLine())
+ FromBuf = False
+ if Line:
+ if not FromBuf and CurObj and TailComments:
+ #==========================================================
+ # Set tail comments to previous statement if not empty.
+ #==========================================================
+ CurObj.SetTailComment(CurObj.GetTailComment()+TailComments)
+
+ if not FromBuf:
+ del TailComments[:]
+ CurComments = TailComments
+ Comments = []
+ if Comment:
+ Comments = [(Comment, self._RawData.LineIndex)]
+
+ #==============================================================
+ # Try if last char of line has backslash
+ #==============================================================
+ Line, Comments = self._TryBackSlash(Line, Comments)
+ CurComments.extend(Comments)
+
+ #==============================================================
+ # Macro found
+ #==============================================================
+ if Line.startswith('DEFINE '):
+ self._MacroParser(Line)
+ del HeadComments[:]
+ del TailComments[:]
+ CurComments = HeadComments
+ continue
+
+ if self._StopCurrentParsing(Line):
+ #==========================================================
+ # This line does not belong to this parser;
+ # save it so it can be used by the next parser
+ #==========================================================
+ self._RawData.SetNext(Line, HeadComments, TailComments)
+ break
+
+ Obj = self._ParseItem()
+ ItemNum += 1
+ if Obj:
+ Obj.SetHeadComment(Obj.GetHeadComment()+HeadComments)
+ Obj.SetTailComment(Obj.GetTailComment()+TailComments)
+ del HeadComments[:]
+ del TailComments[:]
+ CurObj = Obj
+ else:
+ CurObj = None
+ else:
+ if id(CurComments) == id(TailComments):
+ #==========================================================
+ # Check if this comment belongs to tail comment
+ #==========================================================
+ if not self._TailCommentStrategy(Comment):
+ CurComments = HeadComments
+
+ if Comment:
+ CurComments.append(((Comment, self._RawData.LineIndex)))
+ else:
+ del CurComments[:]
+
+ if self._IsStatementRequired() and ItemNum == 0:
+ Logger.Error(
+ TOOL_NAME, FILE_PARSE_FAILURE,
+ File=self._RawData.Filename,
+ Line=Index,
+ ExtraData=ST.ERR_DECPARSE_STATEMENT_EMPTY % LineStr
+ )
+
+## _DecDefine
+# Parse define section
+#
+class _DecDefine(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.ItemObject = DecDefineObject(RawData.Filename)
+ self._LocalMacro = self._RawData.Macros
+ self._DefSecNum = 0
+
+ #
+ # Each field has a function to validate
+ #
+ self.DefineValidation = {
+ DT.TAB_DEC_DEFINES_DEC_SPECIFICATION : self._SetDecSpecification,
+ DT.TAB_DEC_DEFINES_PACKAGE_NAME : self._SetPackageName,
+ DT.TAB_DEC_DEFINES_PACKAGE_GUID : self._SetPackageGuid,
+ DT.TAB_DEC_DEFINES_PACKAGE_VERSION : self._SetPackageVersion,
+ DT.TAB_DEC_DEFINES_PKG_UNI_FILE : self._SetPackageUni,
+ }
+
+ def BlockStart(self):
+ self._DefSecNum += 1
+ if self._DefSecNum > 1:
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_MULTISEC)
+
+ ## CheckRequiredFields
+ #
+ # Check required fields: DEC_SPECIFICATION, PACKAGE_NAME
+ # PACKAGE_GUID, PACKAGE_VERSION
+ #
+ def CheckRequiredFields(self):
+ Ret = False
+ if self.ItemObject.GetPackageSpecification() == '':
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_DEC_SPECIFICATION)
+ elif self.ItemObject.GetPackageName() == '':
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_NAME)
+ elif self.ItemObject.GetPackageGuid() == '':
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_GUID)
+ elif self.ItemObject.GetPackageVersion() == '':
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
+ ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_VERSION)
+ else:
+ Ret = True
+ return Ret
+
+ def _ParseItem(self):
+ Line = self._RawData.CurrentLine
+ TokenList = GetSplitValueList(Line, DT.TAB_EQUAL_SPLIT, 1)
+ if TokenList[0] == DT.TAB_DEC_DEFINES_PKG_UNI_FILE:
+ self.DefineValidation[TokenList[0]](TokenList[1])
+ elif len(TokenList) < 2:
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_FORMAT)
+ elif TokenList[0] not in self.DefineValidation:
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_UNKNOWKEY % TokenList[0])
+ else:
+ self.DefineValidation[TokenList[0]](TokenList[1])
+
+ DefineItem = DecDefineItemObject()
+ DefineItem.Key = TokenList[0]
+ DefineItem.Value = TokenList[1]
+ self.ItemObject.AddItem(DefineItem, self._RawData.CurrentScope)
+ return DefineItem
+
+ def _SetDecSpecification(self, Token):
+ if self.ItemObject.GetPackageSpecification():
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_DEC_SPECIFICATION)
+ if not IsValidToken('0[xX][0-9a-fA-F]{8}', Token):
+ if not IsValidDecVersionVal(Token):
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_SPEC)
+ self.ItemObject.SetPackageSpecification(Token)
+
+ def _SetPackageName(self, Token):
+ if self.ItemObject.GetPackageName():
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_NAME)
+ if not IsValidWord(Token):
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGNAME)
+ self.ItemObject.SetPackageName(Token)
+
+ def _SetPackageGuid(self, Token):
+ if self.ItemObject.GetPackageGuid():
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_GUID)
+ if not CheckGuidRegFormat(Token):
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGGUID)
+ self.ItemObject.SetPackageGuid(Token)
+
+ def _SetPackageVersion(self, Token):
+ if self.ItemObject.GetPackageVersion():
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_VERSION)
+ if not IsValidToken(VERSION_PATTERN, Token):
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGVERSION)
+ else:
+ if not DT.TAB_SPLIT in Token:
+ Token = Token + '.0'
+ self.ItemObject.SetPackageVersion(Token)
+
+ def _SetPackageUni(self, Token):
+ if self.ItemObject.GetPackageUniFile():
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PKG_UNI_FILE)
+ self.ItemObject.SetPackageUniFile(Token)
+
+## _DecInclude
+#
+# Parse include section
+#
+class _DecInclude(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.ItemObject = DecIncludeObject(RawData.Filename)
+
+ def _ParseItem(self):
+ Line = self._RawData.CurrentLine
+
+ if not IsValidPath(Line, self._RawData.PackagePath):
+ self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Line)
+
+ Item = DecIncludeItemObject(StripRoot(self._RawData.PackagePath, Line), self._RawData.PackagePath)
+ self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
+ return Item
+
+## _DecLibraryclass
+#
+# Parse library class section
+#
+class _DecLibraryclass(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.ItemObject = DecLibraryclassObject(RawData.Filename)
+
+ def _ParseItem(self):
+ Line = self._RawData.CurrentLine
+ TokenList = GetSplitValueList(Line, DT.TAB_VALUE_SPLIT)
+ if len(TokenList) != 2:
+ self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_SPLIT)
+ if TokenList[0] == '' or TokenList[1] == '':
+ self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_EMPTY)
+ if not IsValidToken('[A-Z][0-9A-Za-z]*', TokenList[0]):
+ self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_LIB)
+
+ self._CheckReDefine(TokenList[0])
+
+ Value = TokenList[1]
+ #
+ # Must end with .h
+ #
+ if not Value.endswith('.h'):
+ self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_PATH_EXT)
+
+ #
+ # Path must exist
+ #
+ if not IsValidPath(Value, self._RawData.PackagePath):
+ self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Value)
+
+ Item = DecLibraryclassItemObject(TokenList[0], StripRoot(self._RawData.PackagePath, Value),
+ self._RawData.PackagePath)
+ self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
+ return Item
+
+## _DecPcd
+#
+# Parse PCD section
+#
+class _DecPcd(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.ItemObject = DecPcdObject(RawData.Filename)
+ #
+ # Used to check duplicate token
+ # Key is token space and token number (integer), value is C name
+ #
+ self.TokenMap = {}
+
+ def _ParseItem(self):
+ Line = self._RawData.CurrentLine
+ TokenList = Line.split(DT.TAB_VALUE_SPLIT)
+ if len(TokenList) < 4:
+ self._LoggerError(ST.ERR_DECPARSE_PCD_SPLIT)
+
+ #
+ # Token space guid C name
+ #
+ PcdName = GetSplitValueList(TokenList[0], DT.TAB_SPLIT)
+ if len(PcdName) != 2 or PcdName[0] == '' or PcdName[1] == '':
+ self._LoggerError(ST.ERR_DECPARSE_PCD_NAME)
+
+ Guid = PcdName[0]
+ if not IsValidToken(CVAR_PATTERN, Guid):
+ self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_GUID)
+
+ #
+ # PCD C name
+ #
+ CName = PcdName[1]
+ if not IsValidToken(CVAR_PATTERN, CName):
+ self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_PCDCNAME)
+
+ self._CheckReDefine(Guid + DT.TAB_SPLIT + CName)
+
+ #
+ # Default value, may be C array, string or number
+ #
+ Data = DT.TAB_VALUE_SPLIT.join(TokenList[1:-2]).strip()
+
+ #
+ # PCD data type
+ #
+ DataType = TokenList[-2].strip()
+ Valid, Cause = IsValidPcdDatum(DataType, Data)
+ if not Valid:
+ self._LoggerError(Cause)
+ PcdType = self._RawData.CurrentScope[0][0]
+ if PcdType == DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() and DataType != 'BOOLEAN':
+ self._LoggerError(ST.ERR_DECPARSE_PCD_FEATUREFLAG)
+ #
+ # Token value is the last element in list.
+ #
+ Token = TokenList[-1].strip()
+ if not IsValidToken(PCD_TOKEN_PATTERN, Token):
+ self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN % Token)
+ elif not Token.startswith('0x') and not Token.startswith('0X'):
+ if int(Token) > 4294967295:
+ self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN_INT % Token)
+ Token = '0x%x' % int(Token)
+
+ IntToken = int(Token, 0)
+ if (Guid, IntToken) in self.TokenMap:
+ if self.TokenMap[Guid, IntToken] != CName:
+ self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN_UNIQUE%(Token))
+ else:
+ self.TokenMap[Guid, IntToken] = CName
+
+ Item = DecPcdItemObject(Guid, CName, Data, DataType, Token)
+ self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
+ return Item
+
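_DecPcd._ParseItem splits `TokenSpaceGuid.CName|DefaultValue|DataType|Token`; the default value may itself contain '|' (for C arrays), so only the outer fields are positional, and a decimal token must fit in 32 bits and is normalized to hex. A standalone sketch of that layout, with illustrative names:

```python
def parse_pcd_entry(line):
    """Split 'Guid.CName|Default|DataType|Token' from a DEC PCD section."""
    fields = line.split('|')
    if len(fields) < 4:
        raise ValueError('expected at least four |-separated fields')
    guid, _, cname = fields[0].strip().partition('.')
    default = '|'.join(fields[1:-2]).strip()  # may contain '|' in a C array
    datatype = fields[-2].strip()
    token = fields[-1].strip()
    if not token.lower().startswith('0x'):
        if int(token) > 0xFFFFFFFF:
            raise ValueError('token does not fit in 32 bits')
        token = '0x%x' % int(token)           # normalize decimal tokens to hex
    return guid, cname, default, datatype, token

print(parse_pcd_entry('gMyPkgTokenSpaceGuid.PcdDebugMask|0x0|UINT32|4'))
```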
+## _DecGuid
+#
+# Parse GUID, PPI, Protocol section
+#
+class _DecGuid(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.GuidObj = DecGuidObject(RawData.Filename)
+ self.PpiObj = DecPpiObject(RawData.Filename)
+ self.ProtocolObj = DecProtocolObject(RawData.Filename)
+ self.ObjectDict = \
+ {
+ DT.TAB_GUIDS.upper() : self.GuidObj,
+ DT.TAB_PPIS.upper() : self.PpiObj,
+ DT.TAB_PROTOCOLS.upper() : self.ProtocolObj
+ }
+
+ def GetDataObject(self):
+ if self._RawData.CurrentScope:
+ return self.ObjectDict[self._RawData.CurrentScope[0][0]]
+ return None
+
+ def GetGuidObject(self):
+ return self.GuidObj
+
+ def GetPpiObject(self):
+ return self.PpiObj
+
+ def GetProtocolObject(self):
+ return self.ProtocolObj
+
+ def _ParseItem(self):
+ Line = self._RawData.CurrentLine
+ TokenList = GetSplitValueList(Line, DT.TAB_EQUAL_SPLIT, 1)
+ if len(TokenList) < 2:
+ self._LoggerError(ST.ERR_DECPARSE_CGUID)
+ if TokenList[0] == '':
+ self._LoggerError(ST.ERR_DECPARSE_CGUID_NAME)
+ if TokenList[1] == '':
+ self._LoggerError(ST.ERR_DECPARSE_CGUID_GUID)
+ if not IsValidToken(CVAR_PATTERN, TokenList[0]):
+ self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_GUID)
+
+ self._CheckReDefine(TokenList[0])
+
+ if TokenList[1][0] != '{':
+ if not CheckGuidRegFormat(TokenList[1]):
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGGUID)
+ GuidString = TokenList[1]
+ else:
+ #
+ # Convert C-format GUID to GUID string and do a simple error check
+ #
+ GuidString = GuidStructureStringToGuidString(TokenList[1])
+ if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidString == '':
+ self._LoggerError(ST.ERR_DECPARSE_CGUID_GUIDFORMAT)
+
+ #
+ # Check C format GUID
+ #
+ if not IsValidCFormatGuid(TokenList[1]):
+ self._LoggerError(ST.ERR_DECPARSE_CGUID_GUIDFORMAT)
+
+ Item = DecGuidItemObject(TokenList[0], TokenList[1], GuidString)
+ ItemObject = self.ObjectDict[self._RawData.CurrentScope[0][0]]
+ ItemObject.AddItem(Item, self._RawData.CurrentScope)
+ return Item
+
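A [Guids]/[Ppis]/[Protocols] value in a DEC file is either a registry-format GUID or a C initializer beginning with '{'. A rough standalone classifier for the two shapes; the real parser additionally validates the C-format field widths via IsValidCFormatGuid:

```python
import re

GUID_REG = re.compile(r'^[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}$')

def classify_guid_value(value):
    """Return which of the two accepted GUID notations a value uses."""
    value = value.strip()
    if value.startswith('{'):
        return 'c-format' if value.endswith('}') else 'invalid'
    return 'registry' if GUID_REG.match(value) else 'invalid'

print(classify_guid_value('1f73b18d-4630-43c1-a1de-6f80855d7da4'))
print(classify_guid_value(
    '{0x1f73b18d, 0x4630, 0x43c1, {0xa1, 0xde, 0x6f, 0x80, 0x85, 0x5d, 0x7d, 0xa4}}'))
```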
+## _DecUserExtension
+#
+# Parse user extension section
+#
+class _DecUserExtension(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.ItemObject = DecUserExtensionObject(RawData.Filename)
+ self._Headers = []
+ self._CurItems = []
+
+ def BlockStart(self):
+ self._CurItems = []
+ for Header in self._RawData.CurrentScope:
+ if Header in self._Headers:
+ self._LoggerError(ST.ERR_DECPARSE_UE_DUPLICATE)
+ else:
+ self._Headers.append(Header)
+
+ for Item in self._CurItems:
+ if Item.UserId == Header[1] and Item.IdString == Header[2]:
+ Item.ArchAndModuleType.append(Header[3])
+ break
+ else:
+ Item = DecUserExtensionItemObject()
+ Item.UserId = Header[1]
+ Item.IdString = Header[2]
+ Item.ArchAndModuleType.append(Header[3])
+ self._CurItems.append(Item)
+ self.ItemObject.AddItem(Item, None)
+ self._LocalMacro = {}
+
+ def _ParseItem(self):
+ Line = self._RawData.CurrentLine
+ Item = None
+ for Item in self._CurItems:
+ if Item.UserString:
+ Item.UserString = '\n'.join([Item.UserString, Line])
+ else:
+ Item.UserString = Line
+ return Item
+
+## Dec
+#
+# Top dec parser
+#
+class Dec(_DecBase, _DecComments):
+ def __init__(self, DecFile, Parse = True):
+ try:
+ Content = ConvertSpecialChar(open(DecFile, 'r').readlines())
+ except BaseException:
+ Logger.Error(TOOL_NAME, FILE_OPEN_FAILURE, File=DecFile,
+ ExtraData=ST.ERR_DECPARSE_FILEOPEN % DecFile)
+
+ #
+ # Pre-parser for Private section
+ #
+ self._Private = ''
+ __IsFoundPrivate = False
+ NewContent = []
+ for Line in Content:
+ Line = Line.strip()
+ if Line.startswith(DT.TAB_SECTION_START) and Line.endswith(DT.TAB_PRIVATE + DT.TAB_SECTION_END):
+ __IsFoundPrivate = True
+ if Line.startswith(DT.TAB_SECTION_START) and Line.endswith(DT.TAB_SECTION_END)\
+ and not Line.endswith(DT.TAB_PRIVATE + DT.TAB_SECTION_END):
+ __IsFoundPrivate = False
+ if __IsFoundPrivate:
+ self._Private += Line + '\r'
+ if not __IsFoundPrivate:
+ NewContent.append(Line + '\r')
+
+ RawData = FileContent(DecFile, NewContent)
+
+ _DecComments.__init__(self)
+ _DecBase.__init__(self, RawData)
+
+ self.BinaryHeadComment = []
+ self.PcdErrorCommentDict = {}
+
+ self._Define = _DecDefine(RawData)
+ self._Include = _DecInclude(RawData)
+ self._Guid = _DecGuid(RawData)
+ self._LibClass = _DecLibraryclass(RawData)
+ self._Pcd = _DecPcd(RawData)
+ self._UserEx = _DecUserExtension(RawData)
+
+ #
+ # DEC file supported data types (one type per section)
+ #
+ self._SectionParser = {
+ DT.TAB_DEC_DEFINES.upper() : self._Define,
+ DT.TAB_INCLUDES.upper() : self._Include,
+ DT.TAB_LIBRARY_CLASSES.upper() : self._LibClass,
+ DT.TAB_GUIDS.upper() : self._Guid,
+ DT.TAB_PPIS.upper() : self._Guid,
+ DT.TAB_PROTOCOLS.upper() : self._Guid,
+ DT.TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : self._Pcd,
+ DT.TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : self._Pcd,
+ DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() : self._Pcd,
+ DT.TAB_PCDS_DYNAMIC_NULL.upper() : self._Pcd,
+ DT.TAB_PCDS_DYNAMIC_EX_NULL.upper() : self._Pcd,
+ DT.TAB_USER_EXTENSIONS.upper() : self._UserEx
+ }
+
+ if Parse:
+ self.ParseDecComment()
+ self.Parse()
+ #
+ # Parsing done, check required fields
+ #
+ self.CheckRequiredFields()
+
+ def CheckRequiredFields(self):
+ for SectionParser in self._SectionParser.values():
+ if not SectionParser.CheckRequiredFields():
+ return False
+ return True
+
+ ##
+ # Parse DEC file header comments and PCD error comment sections
+ #
+ def ParseDecComment(self):
+ IsFileHeader = False
+ IsBinaryHeader = False
+ FileHeaderLineIndex = -1
+ BinaryHeaderLineIndex = -1
+ TokenSpaceGuidCName = ''
+
+ #
+ # Parse PCD error comment section
+ #
+ while not self._RawData.IsEndOfFile():
+ self._RawData.CurrentLine = self._RawData.GetNextLine()
+ if self._RawData.CurrentLine.startswith(DT.TAB_COMMENT_SPLIT) and \
+ DT.TAB_SECTION_START in self._RawData.CurrentLine and \
+ DT.TAB_SECTION_END in self._RawData.CurrentLine:
+ self._RawData.CurrentLine = self._RawData.CurrentLine.replace(DT.TAB_COMMENT_SPLIT, '').strip()
+
+ if self._RawData.CurrentLine[0] == DT.TAB_SECTION_START and \
+ self._RawData.CurrentLine[-1] == DT.TAB_SECTION_END:
+ RawSection = self._RawData.CurrentLine[1:-1].strip()
+ if RawSection.upper().startswith(DT.TAB_PCD_ERROR.upper()+'.'):
+ TokenSpaceGuidCName = RawSection.split(DT.TAB_PCD_ERROR+'.')[1].strip()
+ continue
+
+ if TokenSpaceGuidCName and self._RawData.CurrentLine.startswith(DT.TAB_COMMENT_SPLIT):
+ self._RawData.CurrentLine = self._RawData.CurrentLine.replace(DT.TAB_COMMENT_SPLIT, '').strip()
+ if self._RawData.CurrentLine != '':
+ if DT.TAB_VALUE_SPLIT not in self._RawData.CurrentLine:
+ self._LoggerError(ST.ERR_DECPARSE_PCDERRORMSG_MISS_VALUE_SPLIT)
+
+ PcdErrorNumber, PcdErrorMsg = GetSplitValueList(self._RawData.CurrentLine, DT.TAB_VALUE_SPLIT, 1)
+ PcdErrorNumber = ParsePcdErrorCode(PcdErrorNumber, self._RawData.Filename, self._RawData.LineIndex)
+ if not PcdErrorMsg.strip():
+ self._LoggerError(ST.ERR_DECPARSE_PCD_MISS_ERRORMSG)
+
+ self.PcdErrorCommentDict[(TokenSpaceGuidCName, PcdErrorNumber)] = PcdErrorMsg.strip()
+ else:
+ TokenSpaceGuidCName = ''
+
+ self._RawData.LineIndex = 0
+ self._RawData.CurrentLine = ''
+ self._RawData.NextLine = ''
+
+ while not self._RawData.IsEndOfFile():
+ Line, Comment = CleanString(self._RawData.GetNextLine())
+
+ #
+ # Header must be pure comment
+ #
+ if Line != '':
+ self._RawData.UndoNextLine()
+ break
+
+ if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) and Comment.find(DT.TAB_HEADER_COMMENT) > 0 \
+ and not Comment[2:Comment.find(DT.TAB_HEADER_COMMENT)].strip():
+ IsFileHeader = True
+ IsBinaryHeader = False
+ FileHeaderLineIndex = self._RawData.LineIndex
+
+ #
+ # Get license information before '@file'
+ #
+ if not IsFileHeader and not IsBinaryHeader and Comment and Comment.startswith(DT.TAB_COMMENT_SPLIT) and \
+ DT.TAB_BINARY_HEADER_COMMENT not in Comment:
+ self._HeadComment.append((Comment, self._RawData.LineIndex))
+
+ if Comment and IsFileHeader and \
+ not(Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
+ and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0):
+ self._HeadComment.append((Comment, self._RawData.LineIndex))
+ #
+ # Double '#' indicates end of header comments
+ #
+ if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsFileHeader:
+ IsFileHeader = False
+ continue
+
+ if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
+ and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0:
+ IsBinaryHeader = True
+ IsFileHeader = False
+ BinaryHeaderLineIndex = self._RawData.LineIndex
+
+ if Comment and IsBinaryHeader:
+ self.BinaryHeadComment.append((Comment, self._RawData.LineIndex))
+ #
+ # Double '#' indicates end of header comments
+ #
+ if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsBinaryHeader:
+ IsBinaryHeader = False
+ break
+
+ if FileHeaderLineIndex > -1 and not IsFileHeader and not IsBinaryHeader:
+ break
+
+ if FileHeaderLineIndex > BinaryHeaderLineIndex and FileHeaderLineIndex > -1 and BinaryHeaderLineIndex > -1:
+ self._LoggerError(ST.ERR_BINARY_HEADER_ORDER)
+
+ if FileHeaderLineIndex == -1:
+# self._LoggerError(ST.ERR_NO_SOURCE_HEADER)
+ Logger.Error(TOOL_NAME, FORMAT_INVALID,
+ ST.ERR_NO_SOURCE_HEADER,
+ File=self._RawData.Filename)
+ return
+
+ def _StopCurrentParsing(self, Line):
+ return False
+
+ def _ParseItem(self):
+ self._SectionHeaderParser()
+ if len(self._RawData.CurrentScope) == 0:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_EMPTY)
+ SectionObj = self._SectionParser[self._RawData.CurrentScope[0][0]]
+ SectionObj.BlockStart()
+ SectionObj.Parse()
+ return SectionObj.GetDataObject()
+
+ def _UserExtentionSectionParser(self):
+ self._RawData.CurrentScope = []
+ ArchList = set()
+ Section = self._RawData.CurrentLine[1:-1]
+ Par = ParserHelper(Section, self._RawData.Filename)
+ while not Par.End():
+ #
+ # User extension
+ #
+ Token = Par.GetToken()
+ if Token.upper() != DT.TAB_USER_EXTENSIONS.upper():
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_UE)
+ UserExtension = Token.upper()
+ Par.AssertChar(DT.TAB_SPLIT, ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
+
+ #
+ # UserID
+ #
+ Token = Par.GetToken()
+ if not IsValidUserId(Token):
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_UE_USERID)
+ UserId = Token
+ Par.AssertChar(DT.TAB_SPLIT, ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
+ #
+ # IdString
+ #
+ Token = Par.GetToken()
+ if not IsValidIdString(Token):
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_UE_IDSTRING)
+ IdString = Token
+ Arch = 'COMMON'
+ if Par.Expect(DT.TAB_SPLIT):
+ Token = Par.GetToken()
+ Arch = Token.upper()
+ if not IsValidArch(Arch):
+ self._LoggerError(ST.ERR_DECPARSE_ARCH)
+ ArchList.add(Arch)
+ if [UserExtension, UserId, IdString, Arch] not in \
+ self._RawData.CurrentScope:
+ self._RawData.CurrentScope.append(
+ [UserExtension, UserId, IdString, Arch]
+ )
+ if not Par.Expect(DT.TAB_COMMA_SPLIT):
+ break
+ elif Par.End():
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMA)
+ Par.AssertEnd(ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMON)
+
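The grammar handled above is `[UserExtensions.UserId."IdString"[.Arch]]`, with Arch defaulting to COMMON. A simplified dot-split sketch; unlike the token-based parser above, it assumes the quoted IdString contains no dots:

```python
def parse_ue_header(section):
    """Tokenize a UserExtensions section header, brackets included."""
    inner = section.strip()[1:-1]
    parts = inner.split('.')
    if len(parts) not in (3, 4) or parts[0].upper() != 'USEREXTENSIONS':
        raise ValueError('malformed UserExtensions header: %r' % section)
    user_id, id_string = parts[1], parts[2]
    arch = parts[3].upper() if len(parts) == 4 else 'COMMON'
    return user_id, id_string, arch

print(parse_ue_header('[UserExtensions.TianoCore."ExtraFiles".IA32]'))
```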
+ ## Section header parser
+ #
+ # The section header is always in following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
+ def _SectionHeaderParser(self):
+ if self._RawData.CurrentLine[0] != DT.TAB_SECTION_START or self._RawData.CurrentLine[-1] != DT.TAB_SECTION_END:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_IDENTIFY)
+
+ RawSection = self._RawData.CurrentLine[1:-1].strip().upper()
+ #
+ # Check the defines section, which may occur only once and
+ # may not be followed by an arch
+ #
+ if RawSection.startswith(DT.TAB_DEC_DEFINES.upper()):
+ if RawSection != DT.TAB_DEC_DEFINES.upper():
+ self._LoggerError(ST.ERR_DECPARSE_DEFINE_SECNAME)
+ #
+ # Check user extension section
+ #
+ if RawSection.startswith(DT.TAB_USER_EXTENSIONS.upper()):
+ return self._UserExtentionSectionParser()
+ self._RawData.CurrentScope = []
+ SectionNames = []
+ ArchList = set()
+ for Item in GetSplitValueList(RawSection, DT.TAB_COMMA_SPLIT):
+ if Item == '':
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBEMPTY % self._RawData.CurrentLine)
+
+ ItemList = GetSplitValueList(Item, DT.TAB_SPLIT)
+ #
+ # Different types of PCD sections are permissible in one section header
+ #
+ SectionName = ItemList[0]
+ if SectionName not in self._SectionParser:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_UNKNOW % SectionName)
+ if SectionName not in SectionNames:
+ SectionNames.append(SectionName)
+ #
+ # In the DEC specification, all section headers except UserExtensions
+ # have at most two parts: SectionName.Arch
+ #
+ if len(ItemList) > 2:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBTOOMANY % Item)
+
+ if DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() in SectionNames and len(SectionNames) > 1:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_FEATUREFLAG % DT.TAB_PCDS_FEATURE_FLAG_NULL)
+ #
+ # S1 is always Arch
+ #
+ if len(ItemList) > 1:
+ Str1 = ItemList[1]
+ if not IsValidArch(Str1):
+ self._LoggerError(ST.ERR_DECPARSE_ARCH)
+ else:
+ Str1 = 'COMMON'
+ ArchList.add(Str1)
+
+ if [SectionName, Str1] not in self._RawData.CurrentScope:
+ self._RawData.CurrentScope.append([SectionName, Str1])
+ #
+ # 'COMMON' must not be combined with specific arches in the same section header
+ #
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMON)
+ if len(SectionNames) == 0:
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBEMPTY % self._RawData.CurrentLine)
+ if len(SectionNames) != 1:
+ for Sec in SectionNames:
+ if not Sec.startswith(DT.TAB_PCDS.upper()):
+ self._LoggerError(ST.ERR_DECPARSE_SECTION_NAME % str(SectionNames))
+
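Two of the section-header rules enforced above: several names may share one header only if all of them are PCD sections, and a FeatureFlag PCD section can never be combined with anything else. A trimmed sketch of just the name rules; the string constants are illustrative, not the real DT values:

```python
def validate_section_names(names):
    """Enforce which section names may be combined in one header."""
    upper = [name.upper() for name in names]
    if 'PCDSFEATUREFLAG' in upper and len(upper) > 1:
        raise ValueError('[PcdsFeatureFlag] cannot share a section header')
    if len(upper) > 1 and not all(name.startswith('PCDS') for name in upper):
        raise ValueError('only PCD sections may be combined: %s' % names)

validate_section_names(['PcdsFixedAtBuild', 'PcdsPatchableInModule'])  # OK
validate_section_names(['Guids'])                                     # OK
```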
+ def GetDefineSectionMacro(self):
+ return self._Define.GetLocalMacro()
+ def GetDefineSectionObject(self):
+ return self._Define.GetDataObject()
+ def GetIncludeSectionObject(self):
+ return self._Include.GetDataObject()
+ def GetGuidSectionObject(self):
+ return self._Guid.GetGuidObject()
+ def GetProtocolSectionObject(self):
+ return self._Guid.GetProtocolObject()
+ def GetPpiSectionObject(self):
+ return self._Guid.GetPpiObject()
+ def GetLibraryClassSectionObject(self):
+ return self._LibClass.GetDataObject()
+ def GetPcdSectionObject(self):
+ return self._Pcd.GetDataObject()
+ def GetUserExtensionSectionObject(self):
+ return self._UserEx.GetDataObject()
+ def GetPackageSpecification(self):
+ return self._Define.GetDataObject().GetPackageSpecification()
+ def GetPackageName(self):
+ return self._Define.GetDataObject().GetPackageName()
+ def GetPackageGuid(self):
+ return self._Define.GetDataObject().GetPackageGuid()
+ def GetPackageVersion(self):
+ return self._Define.GetDataObject().GetPackageVersion()
+ def GetPackageUniFile(self):
+ return self._Define.GetDataObject().GetPackageUniFile()
+ def GetPrivateSections(self):
+ return self._Private
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py
new file mode 100755
index 00000000..4731c85c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py
@@ -0,0 +1,364 @@
+## @file
+# This file is used to define helper class and function for DEC parser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+'''
+DecParserMisc
+'''
+
+## Import modules
+#
+import os
+import Logger.Log as Logger
+from Logger.ToolError import FILE_PARSE_FAILURE
+from Logger import StringTable as ST
+from Library.DataType import TAB_COMMENT_SPLIT
+from Library.DataType import TAB_COMMENT_EDK1_SPLIT
+from Library.ExpressionValidate import IsValidBareCString
+from Library.ParserValidate import IsValidCFormatGuid
+from Library.ExpressionValidate import IsValidFeatureFlagExp
+from Library.ExpressionValidate import IsValidLogicalExpr
+from Library.ExpressionValidate import IsValidStringTest
+from Library.Misc import CheckGuidRegFormat
+
+TOOL_NAME = 'DecParser'
+VERSION_PATTERN = r'[0-9]+(\.[0-9]+)?'
+CVAR_PATTERN = '[_a-zA-Z][a-zA-Z0-9_]*'
+PCD_TOKEN_PATTERN = '(0[xX]0*[a-fA-F0-9]{1,8})|([0-9]+)'
+MACRO_PATTERN = '[A-Z][_A-Z0-9]*'
+
+## FileContent
+# Class to hold DEC file information
+#
+class FileContent:
+ def __init__(self, Filename, FileContent2):
+ self.Filename = Filename
+ self.PackagePath, self.PackageFile = os.path.split(Filename)
+ self.LineIndex = 0
+ self.CurrentLine = ''
+ self.NextLine = ''
+ self.HeadComment = []
+ self.TailComment = []
+ self.CurrentScope = None
+ self.Content = FileContent2
+ self.Macros = {}
+ self.FileLines = len(FileContent2)
+
+ def GetNextLine(self):
+ if self.LineIndex >= self.FileLines:
+ return ''
+ Line = self.Content[self.LineIndex]
+ self.LineIndex += 1
+ return Line
+
+ def UndoNextLine(self):
+ if self.LineIndex > 0:
+ self.LineIndex -= 1
+
+ def ResetNext(self):
+ self.HeadComment = []
+ self.TailComment = []
+ self.NextLine = ''
+
+ def SetNext(self, Line, HeadComment, TailComment):
+ self.NextLine = Line
+ self.HeadComment = HeadComment
+ self.TailComment = TailComment
+
+ def IsEndOfFile(self):
+ return self.LineIndex >= self.FileLines
+
+
+## StripRoot
+#
+# Strip root path
+#
+# @param Root: Root must be absolute path
+# @param Path: Path to be stripped
+#
+def StripRoot(Root, Path):
+ OrigPath = Path
+ Root = os.path.normpath(Root)
+ Path = os.path.normpath(Path)
+ if not os.path.isabs(Root):
+ return OrigPath
+ if Path.startswith(Root):
+ Path = Path[len(Root):]
+ if Path and Path[0] == os.sep:
+ Path = Path[1:]
+ return Path
+ return OrigPath
+
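+# Editor's note: illustrative behavior sketch, not part of the original
+# source (POSIX separators assumed):
+#
+#   StripRoot('/ws', '/ws/SamplePkg/Sample.dec')  ->  'SamplePkg/Sample.dec'
+#   StripRoot('not/absolute', '/ws/Sample.dec')   ->  '/ws/Sample.dec'
+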
+## CleanString
+#
+# Separate the comment part from a line
+# and strip surrounding whitespace
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content,
+# default is DataType.TAB_COMMENT_SPLIT
+#
+def CleanString(Line, CommentCharacter=TAB_COMMENT_SPLIT, \
+ AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip()
+ #
+ # Replace EDK1's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(TAB_COMMENT_EDK1_SPLIT, CommentCharacter)
+ #
+ # separate comments and statements
+ #
+ Comment = ''
+ InQuote = False
+ for Index in range(0, len(Line)):
+ if Line[Index] == '"':
+ InQuote = not InQuote
+ continue
+ if Line[Index] == CommentCharacter and not InQuote:
+ Comment = Line[Index:].strip()
+ Line = Line[0:Index].strip()
+ break
+
+ return Line, Comment
+
+
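+# Editor's note: illustrative behavior sketch, not part of the original
+# source. A '#' inside double quotes is not treated as a comment split:
+#
+#   CleanString('Name = Value # tail')   ->  ('Name = Value', '# tail')
+#   CleanString('Name = "a#b" # tail')   ->  ('Name = "a#b"', '# tail')
+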
+## IsValidNumValUint8
+#
+# Check if Token is NumValUint8: <NumValUint8> ::= {<ShortNum>} {<UINT8>} {<Expression>}
+#
+# @param Token: Token to be checked
+#
+def IsValidNumValUint8(Token):
+ Valid = True
+ Cause = ""
+ TokenValue = None
+ Token = Token.strip()
+ if Token.lower().startswith('0x'):
+ Base = 16
+ else:
+ Base = 10
+ try:
+ TokenValue = int(Token, Base)
+ except BaseException:
+ Valid, Cause = IsValidLogicalExpr(Token, True)
+ if Cause:
+ pass
+ if not Valid:
+ return False
+ if TokenValue and (TokenValue < 0 or TokenValue > 0xFF):
+ return False
+ else:
+ return True
+
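+# Editor's note: illustrative results, not part of the original source:
+#
+#   IsValidNumValUint8('0xFF')   ->  True
+#   IsValidNumValUint8('255')    ->  True
+#   IsValidNumValUint8('0x100')  ->  False   (exceeds UINT8 range)
+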
+## IsValidNList
+#
+# Check if Value has the format of <NumValUint8> ["," <NumValUint8>]{0,}
+# <NumValUint8> ::= {<ShortNum>} {<UINT8>} {<Expression>}
+#
+# @param Value: Value to be checked
+#
+def IsValidNList(Value):
+ Par = ParserHelper(Value)
+ if Par.End():
+ return False
+ while not Par.End():
+ Token = Par.GetToken(',')
+ if not IsValidNumValUint8(Token):
+ return False
+ if Par.Expect(','):
+ if Par.End():
+ return False
+ continue
+ else:
+ break
+ return Par.End()
+
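+# Editor's note: illustrative results, not part of the original source:
+#
+#   IsValidNList('0x01, 0x02')  ->  True
+#   IsValidNList('0x01,')       ->  False   (trailing comma)
+#   IsValidNList('')            ->  False
+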
+## IsValidCArray
+#
+# check Array is valid
+#
+# @param Array: The input Array
+#
+def IsValidCArray(Array):
+ Par = ParserHelper(Array)
+ if not Par.Expect('{'):
+ return False
+ if Par.End():
+ return False
+ while not Par.End():
+ Token = Par.GetToken(',}')
+ #
+ # ShortNum, UINT8, Expression
+ #
+ if not IsValidNumValUint8(Token):
+ return False
+ if Par.Expect(','):
+ if Par.End():
+ return False
+ continue
+ elif Par.Expect('}'):
+ #
+ # End of C array
+ #
+ break
+ else:
+ return False
+ return Par.End()
+
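+# Editor's note: illustrative results, not part of the original source:
+#
+#   IsValidCArray('{0x01, 0x02}')  ->  True
+#   IsValidCArray('0x01, 0x02')    ->  False   (missing braces)
+#   IsValidCArray('{0x01, 0x02')   ->  False   (unterminated array)
+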
+## IsValidPcdDatum
+#
+# check PcdDatum is valid
+#
+# @param Type: The pcd Type
+# @param Value: The pcd Value
+#
+def IsValidPcdDatum(Type, Value):
+ if not Value:
+ return False, ST.ERR_DECPARSE_PCD_VALUE_EMPTY
+ Valid = True
+ Cause = ""
+ if Type not in ["UINT8", "UINT16", "UINT32", "UINT64", "VOID*", "BOOLEAN"]:
+ return False, ST.ERR_DECPARSE_PCD_TYPE
+ if Type == "VOID*":
+ if not ((Value.startswith('L"') or Value.startswith('"') and \
+ Value.endswith('"'))
+ or (IsValidCArray(Value)) or (IsValidCFormatGuid(Value)) \
+ or (IsValidNList(Value)) or (CheckGuidRegFormat(Value))
+ ):
+ return False, ST.ERR_DECPARSE_PCD_VOID % (Value, Type)
+ RealString = Value[Value.find('"') + 1 :-1]
+ if RealString:
+ if not IsValidBareCString(RealString):
+ return False, ST.ERR_DECPARSE_PCD_VOID % (Value, Type)
+ elif Type == 'BOOLEAN':
+ if Value in ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
+ '0x1', '0x01', '1', '0x0', '0x00', '0']:
+ return True, ""
+ Valid, Cause = IsValidStringTest(Value, True)
+ if not Valid:
+ Valid, Cause = IsValidFeatureFlagExp(Value, True)
+ if not Valid:
+ return False, Cause
+ else:
+ if Value and (Value[0] == '-' or Value[0] == '+'):
+ return False, ST.ERR_DECPARSE_PCD_INT_NEGTIVE % (Value, Type)
+ try:
+ StrVal = Value
+ if Value and not Value.startswith('0x') \
+ and not Value.startswith('0X'):
+ Value = Value.lstrip('0')
+ if not Value:
+ return True, ""
+ Value = int(Value, 0)
+ MAX_VAL_TYPE = {"BOOLEAN": 0x01, 'UINT8': 0xFF, 'UINT16': 0xFFFF, 'UINT32': 0xFFFFFFFF,
+ 'UINT64': 0xFFFFFFFFFFFFFFFF}
+ if Value > MAX_VAL_TYPE[Type]:
+ return False, ST.ERR_DECPARSE_PCD_INT_EXCEED % (StrVal, Type)
+ except BaseException:
+ Valid, Cause = IsValidLogicalExpr(Value, True)
+ if not Valid:
+ return False, Cause
+
+ return True, ""
+
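+# Editor's note: illustrative results, not part of the original source
+# (the VOID* case assumes the quoted content passes IsValidBareCString):
+#
+#   IsValidPcdDatum('UINT8', '0xFF')     ->  (True, '')
+#   IsValidPcdDatum('UINT8', '0x100')    ->  (False, <range-exceeded error>)
+#   IsValidPcdDatum('BOOLEAN', 'TRUE')   ->  (True, '')
+#   IsValidPcdDatum('VOID*', '"hello"')  ->  (True, '')
+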
+## ParserHelper
+#
+class ParserHelper:
+ def __init__(self, String, File=''):
+ self._String = String
+ self._StrLen = len(String)
+ self._Index = 0
+ self._File = File
+
+ ## End
+ #
+ # Check whether the end of the string has been reached
+ #
+ def End(self):
+ self.__SkipWhitespace()
+ return self._Index >= self._StrLen
+
+ ## __SkipWhitespace
+ #
+ # Skip whitespace
+ #
+ def __SkipWhitespace(self):
+ for Char in self._String[self._Index:]:
+ if Char not in ' \t':
+ break
+ self._Index += 1
+
+ ## Expect
+ #
+ # Check that the next non-whitespace char is ExpectChar
+ #
+ # @param ExpectChar: char expected at the current index of the string
+ #
+ def Expect(self, ExpectChar):
+ self.__SkipWhitespace()
+ for Char in self._String[self._Index:]:
+ if Char != ExpectChar:
+ return False
+ else:
+ self._Index += 1
+ return True
+ #
+ # Index out of bound of String
+ #
+ return False
+
+ ## GetToken
+ #
+ # Get token until encounter StopChar, front whitespace is consumed
+ #
+ # @param StopChar: Get token until encounter char in StopChar
+ # @param StkipPair: Only can be ' or ", StopChar in SkipPair are skipped
+ #
+ def GetToken(self, StopChar='.,|\t ', SkipPair='"'):
+ self.__SkipWhitespace()
+ PreIndex = self._Index
+ InQuote = False
+ LastChar = ''
+ for Char in self._String[self._Index:]:
+ if Char == SkipPair and LastChar != '\\':
+ InQuote = not InQuote
+ if Char in StopChar and not InQuote:
+ break
+ self._Index += 1
+ if Char == '\\' and LastChar == '\\':
+ LastChar = ''
+ else:
+ LastChar = Char
+ return self._String[PreIndex:self._Index]
+
+ ## AssertChar
+ #
+ # Assert that the char at the current index of the string is AssertChar,
+ # otherwise report an error
+ #
+ # @param AssertChar: AssertChar
+ # @param ErrorString: ErrorString
+ # @param ErrorLineNum: ErrorLineNum
+ #
+ def AssertChar(self, AssertChar, ErrorString, ErrorLineNum):
+ if not self.Expect(AssertChar):
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._File,
+ Line=ErrorLineNum, ExtraData=ErrorString)
+
+ ## AssertEnd
+ #
+ # @param ErrorString: ErrorString
+ # @param ErrorLineNum: ErrorLineNum
+ #
+ def AssertEnd(self, ErrorString, ErrorLineNum):
+ self.__SkipWhitespace()
+ if self._Index != self._StrLen:
+ Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._File,
+ Line=ErrorLineNum, ExtraData=ErrorString)
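+
+# Editor's note: illustrative ParserHelper walk-through, not part of the
+# original source:
+#
+#   Par = ParserHelper('TokenSpace.PcdName|Value')
+#   Par.GetToken('.')   # -> 'TokenSpace'
+#   Par.Expect('.')     # -> True, consumes the '.'
+#   Par.GetToken('|')   # -> 'PcdName'
+#   Par.Expect('|')     # -> True
+#   Par.GetToken()      # -> 'Value'
+#   Par.End()           # -> True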
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
new file mode 100755
index 00000000..0f9e9ad3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
@@ -0,0 +1,283 @@
+## @file
+# This file provides methods for processing AsBuilt INF files. It is consumed by InfParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+'''
+InfAsBuiltProcess
+'''
+## Import modules
+#
+
+import os
+import re
+from Library import GlobalData
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger import ToolError
+
+from Library.StringUtils import GetSplitValueList
+from Library.Misc import GetHelpStringByRemoveHashKey
+from Library.Misc import ValidFile
+from Library.Misc import ProcessLineExtender
+from Library.ParserValidate import IsValidPath
+from Library.Parsing import MacroParser
+from Parser.InfParserMisc import InfExpandMacro
+
+from Library import DataType as DT
+
+## GetLibInstanceInfo
+#
+# Get the information from Library Instance INF file.
+#
+# @param String: A string starting with # and followed by an INF file path
+# @param WorkSpace: The WorkSpace directory used to combine with the INF file path
+#
+# @return GUID, Version
+def GetLibInstanceInfo(String, WorkSpace, LineNo, CurrentInfFileName):
+
+ FileGuidString = ""
+ VerString = ""
+
+ OriginalString = String
+ String = String.strip()
+ if not String:
+ return None, None
+ #
+ # Remove "#" characters at the beginning
+ #
+ String = GetHelpStringByRemoveHashKey(String)
+ String = String.strip()
+
+ #
+ # To deal with library instance specified by GUID and version
+ #
+ RegFormatGuidPattern = re.compile(r"\s*([0-9a-fA-F]){8}-"
+ r"([0-9a-fA-F]){4}-"
+ r"([0-9a-fA-F]){4}-"
+ r"([0-9a-fA-F]){4}-"
+ r"([0-9a-fA-F]){12}\s*")
+ VersionPattern = re.compile(r'[\t\s]*\d+(\.\d+)?[\t\s]*')
+ GuidMatchedObj = RegFormatGuidPattern.search(String)
+
+ if String.upper().startswith('GUID') and GuidMatchedObj and 'Version' in String:
+ VersionStr = String[String.upper().find('VERSION') + 8:]
+ VersionMatchedObj = VersionPattern.search(VersionStr)
+ if VersionMatchedObj:
+ Guid = GuidMatchedObj.group().strip()
+ Version = VersionMatchedObj.group().strip()
+ return Guid, Version
+
+ #
+ # To deal with library instance specified by file name
+ #
+ FileLinesList = GetFileLineContent(String, WorkSpace, LineNo, OriginalString)
+
+
+ ReFindFileGuidPattern = re.compile(r"^\s*FILE_GUID\s*=.*$")
+ ReFindVerStringPattern = re.compile(r"^\s*VERSION_STRING\s*=.*$")
+
+ for Line in FileLinesList:
+ if ReFindFileGuidPattern.match(Line):
+ FileGuidString = Line
+ if ReFindVerStringPattern.match(Line):
+ VerString = Line
+
+ if FileGuidString:
+ FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
+ if VerString:
+ VerString = GetSplitValueList(VerString, '=', 1)[1]
+
+ return FileGuidString, VerString
+
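+# Editor's note: illustrative inputs, not part of the original source. Two
+# '#'-prefixed forms are accepted (the path below is hypothetical):
+#
+#   '# GUID = 11111111-2222-3333-4444-555555555555 Version = 1.0'
+#       -> returns the GUID/version pair directly
+#   '# SamplePkg/Library/SampleLib/SampleLib.inf'
+#       -> opens the INF and extracts FILE_GUID / VERSION_STRING
+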
+## GetPackageListInfo
+#
+# Get the package information from INF file.
+#
+# @param FileNameString: A string starting with # and followed by an INF file path
+# @param WorkSpace: The WorkSpace directory used to combine with the INF file path
+#
+# @return List of packages referenced by the INF file
+def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
+ PackageInfoList = []
+ DefineSectionMacros = {}
+ PackageSectionMacros = {}
+
+ FileLinesList = GetFileLineContent(FileNameString, WorkSpace, LineNo, '')
+
+ RePackageHeader = re.compile(r'^\s*\[Packages.*\].*$')
+ ReDefineHeader = re.compile(r'^\s*\[Defines].*$')
+
+ PackageHederFlag = False
+ DefineHeaderFlag = False
+ LineNo = -1
+ for Line in FileLinesList:
+ LineNo += 1
+ Line = Line.strip()
+
+ if Line.startswith('['):
+ PackageHederFlag = False
+ DefineHeaderFlag = False
+
+ if Line.startswith("#"):
+ continue
+
+ if not Line:
+ continue
+
+ #
+ # Found [Packages] section
+ #
+ if RePackageHeader.match(Line):
+ PackageHederFlag = True
+ continue
+
+ #
+ # Found [Defines] section
+ #
+ if ReDefineHeader.match(Line):
+ DefineHeaderFlag = True
+ continue
+
+ if DefineHeaderFlag:
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((Line, LineNo),
+ FileNameString,
+ DT.MODEL_META_DATA_HEADER,
+ DefineSectionMacros)
+
+ if Name is not None:
+ DefineSectionMacros[Name] = Value
+ continue
+
+ if PackageHederFlag:
+
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((Line, LineNo),
+ FileNameString,
+ DT.MODEL_META_DATA_PACKAGE,
+ DefineSectionMacros)
+ if Name is not None:
+ PackageSectionMacros[Name] = Value
+ continue
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ Line = InfExpandMacro(Line, (FileNameString, Line, LineNo), DefineSectionMacros, PackageSectionMacros, True)
+
+ Line = GetSplitValueList(Line, "#", 1)[0]
+ Line = GetSplitValueList(Line, "|", 1)[0]
+ PackageInfoList.append(Line)
+
+ return PackageInfoList
+
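+# Editor's note: illustrative input/output, not part of the original source
+# (package names are hypothetical):
+#
+#   [Packages]
+#     SamplePkg/SamplePkg.dec
+#     OtherPkg/OtherPkg.dec      # tail comments and |FFE parts are dropped
+#
+# -> ['SamplePkg/SamplePkg.dec', 'OtherPkg/OtherPkg.dec']
+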
+def GetFileLineContent(FileName, WorkSpace, LineNo, OriginalString):
+
+ if not LineNo:
+ LineNo = -1
+
+ #
+ # Validate file name exist.
+ #
+ FullFileName = os.path.normpath(os.path.realpath(os.path.join(WorkSpace, FileName)))
+ if not (ValidFile(FullFileName)):
+ return []
+
+ #
+ # Validate file exist/format.
+ #
+ if not IsValidPath(FileName, WorkSpace):
+ return []
+
+ FileLinesList = []
+
+ try:
+ FullFileName = FullFileName.replace('\\', '/')
+ Inputfile = open(FullFileName, "r")
+ try:
+ FileLinesList = Inputfile.readlines()
+ except BaseException:
+ Logger.Error("InfParser", ToolError.FILE_READ_FAILURE, ST.ERR_FILE_OPEN_FAILURE, File=FullFileName)
+ finally:
+ Inputfile.close()
+ except BaseException:
+ Logger.Error("InfParser",
+ ToolError.FILE_READ_FAILURE,
+ ST.ERR_FILE_OPEN_FAILURE,
+ File=FullFileName)
+
+ FileLinesList = ProcessLineExtender(FileLinesList)
+
+ return FileLinesList
+
+##
+# Get all INF files from current workspace
+#
+#
+def GetInfsFromWorkSpace(WorkSpace):
+ InfFiles = []
+ for top, dirs, files in os.walk(WorkSpace):
+ dirs = dirs # just for pylint
+ for File in files:
+ if File.upper().endswith(".INF"):
+ InfFiles.append(os.path.join(top, File))
+
+ return InfFiles
+
+##
+# Get GUID and version from library instance file
+#
+#
+def GetGuidVerFormLibInstance(Guid, Version, WorkSpace, CurrentInfFileName):
+ for InfFile in GetInfsFromWorkSpace(WorkSpace):
+ try:
+ if InfFile.strip().upper() == CurrentInfFileName.strip().upper():
+ continue
+ InfFile = InfFile.replace('\\', '/')
+ if InfFile not in GlobalData.gLIBINSTANCEDICT:
+ InfFileObj = open(InfFile, "r")
+ GlobalData.gLIBINSTANCEDICT[InfFile] = InfFileObj
+ else:
+ InfFileObj = GlobalData.gLIBINSTANCEDICT[InfFile]
+
+ except BaseException:
+ Logger.Error("InfParser",
+ ToolError.FILE_READ_FAILURE,
+ ST.ERR_FILE_OPEN_FAILURE,
+ File=InfFile)
+ try:
+ FileLinesList = InfFileObj.readlines()
+ FileLinesList = ProcessLineExtender(FileLinesList)
+
+ ReFindFileGuidPattern = re.compile(r"^\s*FILE_GUID\s*=.*$")
+ ReFindVerStringPattern = re.compile(r"^\s*VERSION_STRING\s*=.*$")
+
+ for Line in FileLinesList:
+ if ReFindFileGuidPattern.match(Line):
+ FileGuidString = Line
+ if ReFindVerStringPattern.match(Line):
+ VerString = Line
+
+ if FileGuidString:
+ FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
+ if VerString:
+ VerString = GetSplitValueList(VerString, '=', 1)[1]
+
+ if FileGuidString.strip().upper() == Guid.upper() and \
+ VerString.strip().upper() == Version.upper():
+ return Guid, Version
+
+ except BaseException:
+ Logger.Error("InfParser", ToolError.FILE_READ_FAILURE, ST.ERR_FILE_OPEN_FAILURE, File=InfFile)
+ finally:
+ InfFileObj.close()
+
+ return '', ''
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
new file mode 100755
index 00000000..b3ee7084
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
@@ -0,0 +1,226 @@
+## @file
+# This file contains the parser for [Binaries] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfBinarySectionParser
+'''
+##
+# Import Modules
+#
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Object.Parser.InfCommonObject import CurrentLine
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfBinarySectionParser(InfParserSectionRoot):
+ ## InfBinaryParser
+ #
+ #
+ def InfBinaryParser(self, SectionString, InfSectionObject, FileName):
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ ValueList = []
+ #
+ # For UI (UI, SEC_UI, UNI_UI) binaries
+ # One and only one UI section can be included
+ #
+ UiBinaryList = []
+ #
+ # For Version (VER, SEC_VER, UNI_VER).
+ # One and only one VER section can be included
+ #
+ VerBinaryList = []
+ #
+ # For other common type binaries
+ #
+ ComBinaryList = []
+
+ StillCommentFalg = False
+ HeaderComments = []
+ LineComment = None
+
+ AllSectionContent = ''
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ BinLineContent = Line[0]
+ BinLineNo = Line[1]
+
+ if BinLineContent.strip() == '':
+ continue
+
+ CurrentLineObj = CurrentLine()
+ CurrentLineObj.FileName = FileName
+ CurrentLineObj.LineString = BinLineContent
+ CurrentLineObj.LineNo = BinLineNo
+ #
+ # Found Header Comments
+ #
+ if BinLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ #
+ # The previous line was a comment and this line continues it.
+ #
+ if StillCommentFalg:
+ HeaderComments.append(Line)
+ AllSectionContent += BinLineContent + DT.END_OF_LINE
+ continue
+ #
+ # First comment line encountered
+ #
+ else:
+ #
+ # Clear original data
+ #
+ HeaderComments = []
+ HeaderComments.append(Line)
+ AllSectionContent += BinLineContent + DT.END_OF_LINE
+ StillCommentFalg = True
+ continue
+ else:
+ StillCommentFalg = False
+
+ if len(HeaderComments) >= 1:
+ LineComment = InfLineCommentObject()
+ LineCommentContent = ''
+ for Item in HeaderComments:
+ LineCommentContent += Item[0] + DT.END_OF_LINE
+ LineComment.SetHeaderComments(LineCommentContent)
+
+ #
+ # Find Tail comment.
+ #
+ if BinLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ TailComments = BinLineContent[BinLineContent.find(DT.TAB_COMMENT_SPLIT):]
+ BinLineContent = BinLineContent[:BinLineContent.find(DT.TAB_COMMENT_SPLIT)]
+ if LineComment is None:
+ LineComment = InfLineCommentObject()
+ LineComment.SetTailComments(TailComments)
+
+ #
+ # Find Macro
+ #
+ MacroDef = MacroParser((BinLineContent, BinLineNo),
+ FileName,
+ DT.MODEL_EFI_BINARY_FILE,
+ self.FileLocalMacros)
+ if MacroDef[0] is not None:
+ SectionMacros[MacroDef[0]] = MacroDef[1]
+ LineComment = None
+ HeaderComments = []
+ continue
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ LineContent = InfExpandMacro(BinLineContent,
+ (FileName, BinLineContent, BinLineNo),
+ self.FileLocalMacros,
+ SectionMacros, True)
+
+ AllSectionContent += LineContent + DT.END_OF_LINE
+ TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Should equal to UI/SEC_UI/UNI_UI
+ #
+ ValueList[0] = ValueList[0].strip()
+ if ValueList[0] == DT.BINARY_FILE_TYPE_UNI_UI or \
+ ValueList[0] == DT.BINARY_FILE_TYPE_SEC_UI or \
+ ValueList[0] == DT.BINARY_FILE_TYPE_UI:
+ if len(ValueList) == 2:
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
+ 2)
+ NewValueList = []
+ NewValueList.append(ValueList[0])
+ for Item in TokenList:
+ NewValueList.append(Item)
+ UiBinaryList.append((NewValueList,
+ LineComment,
+ CurrentLineObj))
+ #
+ # Should equal to VER/SEC_VER/UNI_VER
+ #
+ elif ValueList[0] == DT.BINARY_FILE_TYPE_UNI_VER or \
+ ValueList[0] == DT.BINARY_FILE_TYPE_SEC_VER or \
+ ValueList[0] == DT.BINARY_FILE_TYPE_VER:
+ if len(ValueList) == 2:
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
+ 2)
+ NewValueList = []
+ NewValueList.append(ValueList[0])
+ for Item in TokenList:
+ NewValueList.append(Item)
+ VerBinaryList.append((NewValueList,
+ LineComment,
+ CurrentLineObj))
+ else:
+ if len(ValueList) == 2:
+ if ValueList[0].strip() == 'SUBTYPE_GUID':
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
+ 5)
+ else:
+ TokenList = GetSplitValueList(ValueList[1],
+ DT.TAB_VALUE_SPLIT,
+ 4)
+
+ NewValueList = []
+ NewValueList.append(ValueList[0])
+ for Item in TokenList:
+ NewValueList.append(Item)
+ ComBinaryList.append((NewValueList,
+ LineComment,
+ CurrentLineObj))
+ elif len(ValueList) == 1:
+ NewValueList = []
+ NewValueList.append(ValueList[0])
+ ComBinaryList.append((NewValueList,
+ LineComment,
+ CurrentLineObj))
+
+
+ ValueList = []
+ LineComment = None
+ TailComments = ''
+ HeaderComments = []
+ continue
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ for Item in self.LastSectionHeaderContent:
+ if Item[1] not in ArchList:
+ ArchList.append(Item[1])
+ InfSectionObject.SetSupArchList(Item[1])
+
+ InfSectionObject.SetAllContent(AllSectionContent)
+ if not InfSectionObject.SetBinary(UiBinaryList,
+ VerBinaryList,
+ ComBinaryList,
+ ArchList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[Binaries]"),
+ File=FileName,
+ Line=Item[3])
+
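+# Editor's note: illustrative [Binaries] section content this parser
+# consumes, not part of the original source (names are hypothetical):
+#
+#   [Binaries.IA32]
+#     UI|SampleModuleName
+#     VER|1.0
+#     PE32|SampleModule.efi|*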
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py
new file mode 100755
index 00000000..3f74ff0c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py
@@ -0,0 +1,212 @@
+## @file
+# This file contains the parser for [BuildOptions] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+InfBuildOptionSectionParser
+'''
+##
+# Import Modules
+#
+from Library import DataType as DT
+from Library import GlobalData
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library.Misc import GetSplitValueList
+from Parser.InfParserMisc import IsAsBuildOptionInfo
+from Library.Misc import GetHelpStringByRemoveHashKey
+from Library.ParserValidate import IsValidFamily
+from Library.ParserValidate import IsValidBuildOptionName
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfBuildOptionSectionParser(InfParserSectionRoot):
+ ## InfBuildOptionParser
+ #
+ #
+ def InfBuildOptionParser(self, SectionString, InfSectionObject, FileName):
+
+ BuildOptionList = []
+ SectionContent = ''
+
+ if not GlobalData.gIS_BINARY_INF:
+ ValueList = []
+ LineNo = 0
+
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+ TailComments = ''
+ ReplaceFlag = False
+
+ if LineContent.strip() == '':
+ SectionContent += LineContent + DT.END_OF_LINE
+ continue
+ #
+ # Found Comment
+ #
+ if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ SectionContent += LineContent + DT.END_OF_LINE
+ continue
+
+ #
+ # Find Tail comment.
+ #
+ if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
+ LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_DEQUAL_SPLIT, 1)
+ if len(TokenList) == 2:
+ #
+ # "Replace" type build option
+ #
+ TokenList.append('True')
+ ReplaceFlag = True
+ else:
+ TokenList = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)
+ #
+ # "Append" type build option
+ #
+ if len(TokenList) == 2:
+ TokenList.append('False')
+ else:
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
+ ExtraData=LineContent,
+ File=FileName,
+ Line=LineNo)
+
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with [Defines] section Macro
+ #
+ ValueList[0] = InfExpandMacro(ValueList[0], (FileName, LineContent, LineNo),
+ self.FileLocalMacros, None)
+ ValueList[1] = InfExpandMacro(ValueList[1], (FileName, LineContent, LineNo),
+ self.FileLocalMacros, None, True)
+ EqualString = ''
+ if not ReplaceFlag:
+ EqualString = ' = '
+ else:
+ EqualString = ' == '
+
+ SectionContent += ValueList[0] + EqualString + ValueList[1] + TailComments + DT.END_OF_LINE
+
+ Family = GetSplitValueList(ValueList[0], DT.TAB_COLON_SPLIT, 1)
+ if len(Family) == 2:
+ if not IsValidFamily(Family[0]):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
+ ExtraData=LineContent,
+ File=FileName,
+ Line=LineNo)
+ if not IsValidBuildOptionName(Family[1]):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
+ ExtraData=LineContent,
+ File=FileName,
+ Line=LineNo)
+ if len(Family) == 1:
+ if not IsValidBuildOptionName(Family[0]):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
+ ExtraData=LineContent,
+ File=FileName,
+ Line=LineNo)
+
+ BuildOptionList.append(ValueList)
+ ValueList = []
+ continue
+ else:
+ BuildOptionList = InfAsBuiltBuildOptionParser(SectionString, FileName)
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ LastItem = ''
+ for Item in self.LastSectionHeaderContent:
+ LastItem = Item
+ if Item[1] != '' and Item[1] not in ArchList:
+ ArchList.append(Item[1])
+ InfSectionObject.SetSupArchList(Item[1])
+
+ InfSectionObject.SetAllContent(SectionContent)
+ if not InfSectionObject.SetBuildOptions(BuildOptionList, ArchList, SectionContent):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[BuildOptions]"),
+ File=FileName,
+ Line=LastItem[3])
+
+## InfAsBuiltBuildOptionParser
+#
+#
+def InfAsBuiltBuildOptionParser(SectionString, FileName):
+ BuildOptionList = []
+ #
+ # AsBuild Binary INF file.
+ #
+ AsBuildOptionFlag = False
+ BuildOptionItem = []
+ Count = 0
+ for Line in SectionString:
+ Count += 1
+ LineContent = Line[0]
+ LineNo = Line[1]
+
+ #
+ # The last line
+ #
+ if len(SectionString) == Count:
+ if LineContent.strip().startswith("##") and AsBuildOptionFlag:
+ BuildOptionList.append(BuildOptionItem)
+ BuildOptionList.append([GetHelpStringByRemoveHashKey(LineContent)])
+ elif LineContent.strip().startswith("#") and AsBuildOptionFlag:
+ BuildOptionInfo = GetHelpStringByRemoveHashKey(LineContent)
+ BuildOptionItem.append(BuildOptionInfo)
+ BuildOptionList.append(BuildOptionItem)
+ else:
+ if len(BuildOptionItem) > 0:
+ BuildOptionList.append(BuildOptionItem)
+
+ break
+
+ if LineContent.strip() == '':
+ AsBuildOptionFlag = False
+ continue
+
+ if LineContent.strip().startswith("##") and AsBuildOptionFlag:
+ if len(BuildOptionItem) > 0:
+ BuildOptionList.append(BuildOptionItem)
+
+ BuildOptionItem = []
+
+ if not LineContent.strip().startswith("#"):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_BO_CONTATIN_ASBUILD_AND_COMMON,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=LineContent)
+
+ if IsAsBuildOptionInfo(LineContent):
+ AsBuildOptionFlag = True
+ continue
+
+ if AsBuildOptionFlag:
+ BuildOptionInfo = GetHelpStringByRemoveHashKey(LineContent)
+ BuildOptionItem.append(BuildOptionInfo)
+
+ return BuildOptionList
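+
+# Editor's note: illustrative build-option lines, not part of the original
+# source. In InfBuildOptionParser above, '==' marks a "replace" option and
+# '=' an "append" option:
+#
+#   MSFT:*_*_IA32_CC_FLAGS == /Od   -> ['MSFT:*_*_IA32_CC_FLAGS', '/Od', 'True']
+#   MSFT:*_*_IA32_CC_FLAGS = /W4    -> ['MSFT:*_*_IA32_CC_FLAGS', '/W4', 'False']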
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
new file mode 100755
index 00000000..2117da1c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
@@ -0,0 +1,191 @@
+## @file
+# This file contains the parser for [Defines] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+InfDefineSectionParser
+'''
+##
+# Import Modules
+#
+import re
+
+from Library import DataType as DT
+from Library import GlobalData
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Library.ParserValidate import IsValidArch
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Object.Parser.InfDefineObject import InfDefMember
+from Parser.InfParserMisc import InfExpandMacro
+from Object.Parser.InfMisc import ErrorInInf
+from Logger import StringTable as ST
+from Parser.InfParserMisc import InfParserSectionRoot
+
+## GetValidateArchList
+#
+#
+def GetValidateArchList(LineContent):
+
+ TempArch = ''
+ ArchList = []
+ ValidateAcrhPatten = re.compile(r"^\s*#\s*VALID_ARCHITECTURES\s*=\s*.*$", re.DOTALL)
+
+ if ValidateAcrhPatten.match(LineContent):
+ TempArch = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)[1]
+
+ TempArch = GetSplitValueList(TempArch, '(', 1)[0]
+
+ ArchList = re.split(r'\s+', TempArch)
+ NewArchList = []
+ for Arch in ArchList:
+ if IsValidArch(Arch):
+ NewArchList.append(Arch)
+
+ ArchList = NewArchList
+
+ return ArchList
+
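+# Editor's note: illustrative input, not part of the original source:
+#
+#   '# VALID_ARCHITECTURES = IA32 X64'   ->  ['IA32', 'X64']
+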
+class InfDefinSectionParser(InfParserSectionRoot):
+ def InfDefineParser(self, SectionString, InfSectionObject, FileName, SectionComment):
+
+ if SectionComment:
+ pass
+ #
+ # Parse the [Defines] section content and fill the content list.
+ #
+ StillCommentFalg = False
+ HeaderComments = []
+ SectionContent = ''
+ ArchList = []
+ _ContentList = []
+ _ValueList = []
+ #
+ # Add WORKSPACE to global Macro dict.
+ #
+ self.FileLocalMacros['WORKSPACE'] = GlobalData.gWORKSPACE
+
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+ TailComments = ''
+ LineComment = None
+
+ LineInfo = ['', -1, '']
+ LineInfo[0] = FileName
+ LineInfo[1] = LineNo
+ LineInfo[2] = LineContent
+
+ if LineContent.strip() == '':
+ continue
+ #
+ # The first VALID_ARCHITECTURES comment encountered is treated as the supported arch list.
+ #
+ if not ArchList:
+ ArchList = GetValidateArchList(LineContent)
+
+ #
+ # Parse comments
+ #
+ if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ #
+ # The previous line was a comment and this line continues it.
+ #
+ if StillCommentFalg:
+ HeaderComments.append(Line)
+ SectionContent += LineContent + DT.END_OF_LINE
+ continue
+ #
+ # First comment line encountered
+ #
+ else:
+ #
+ # Clear original data
+ #
+ HeaderComments = []
+ HeaderComments.append(Line)
+ StillCommentFalg = True
+ SectionContent += LineContent + DT.END_OF_LINE
+ continue
+ else:
+ StillCommentFalg = False
+
+ if len(HeaderComments) >= 1:
+ LineComment = InfLineCommentObject()
+ LineCommentContent = ''
+ for Item in HeaderComments:
+ LineCommentContent += Item[0] + DT.END_OF_LINE
+ LineComment.SetHeaderComments(LineCommentContent)
+
+ #
+ # Find Tail comment.
+ #
+ if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
+ LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+ if LineComment is None:
+ LineComment = InfLineCommentObject()
+ LineComment.SetTailComments(TailComments)
+
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((LineContent, LineNo),
+ FileName,
+ DT.MODEL_META_DATA_HEADER,
+ self.FileLocalMacros)
+ if Name is not None:
+ self.FileLocalMacros[Name] = Value
+ continue
+
+ #
+ # Replace with [Defines] section Macro
+ #
+ LineContent = InfExpandMacro(LineContent,
+ (FileName, LineContent, LineNo),
+ self.FileLocalMacros,
+ None, True)
+
+ SectionContent += LineContent + DT.END_OF_LINE
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)
+ if len(TokenList) < 2:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
+ LineInfo=LineInfo)
+ _ValueList[0:len(TokenList)] = TokenList
+ if not _ValueList[0]:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_NAME,
+ LineInfo=LineInfo)
+ if not _ValueList[1]:
+ ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
+ LineInfo=LineInfo)
+
+ Name, Value = _ValueList[0], _ValueList[1]
+
+ InfDefMemberObj = InfDefMember(Name, Value)
+ if (LineComment is not None):
+ InfDefMemberObj.Comments.SetHeaderComments(LineComment.GetHeaderComments())
+ InfDefMemberObj.Comments.SetTailComments(LineComment.GetTailComments())
+
+ InfDefMemberObj.CurrentLine.SetFileName(self.FullPath)
+ InfDefMemberObj.CurrentLine.SetLineString(LineContent)
+ InfDefMemberObj.CurrentLine.SetLineNo(LineNo)
+
+ _ContentList.append(InfDefMemberObj)
+ HeaderComments = []
+ TailComments = ''
+
+ #
+ # Current Define section archs
+ #
+ if not ArchList:
+ ArchList = ['COMMON']
+
+ InfSectionObject.SetAllContent(SectionContent)
+
+ InfSectionObject.SetDefines(_ContentList, Arch=ArchList)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
new file mode 100755
index 00000000..86634b32
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
@@ -0,0 +1,98 @@
+## @file
+# This file contains the parser for [Depex] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfDepexSectionParser
+'''
+##
+# Import Modules
+#
+import re
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library.Misc import GetSplitValueList
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfDepexSectionParser(InfParserSectionRoot):
+ ## InfDepexParser
+ #
+ # For now, only separate the Depex string and comments.
+ # There are two types of section headers:
+ # 1. [Depex.Arch.ModuleType, ...]
+ # 2. [Depex.Arch|FFE, ...]
+ #
+ def InfDepexParser(self, SectionString, InfSectionObject, FileName):
+ DepexContent = []
+ DepexComment = []
+ ValueList = []
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+
+ #
+ # Found comment
+ #
+ if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ DepexComment.append((LineContent, LineNo))
+ continue
+ #
+ # Replace with [Defines] section Macro
+ #
+ LineContent = InfExpandMacro(LineContent,
+ (FileName, LineContent, Line[1]),
+ self.FileLocalMacros,
+ None, True)
+
+ CommentCount = LineContent.find(DT.TAB_COMMENT_SPLIT)
+
+ if CommentCount > -1:
+ DepexComment.append((LineContent[CommentCount:], LineNo))
+ LineContent = LineContent[:CommentCount-1]
+
+
+ CommentCount = -1
+ DepexContent.append((LineContent, LineNo))
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_COMMENT_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Current section archs
+ #
+ KeyList = []
+ LastItem = ''
+ for Item in self.LastSectionHeaderContent:
+ LastItem = Item
+ if (Item[1], Item[2], Item[3]) not in KeyList:
+ KeyList.append((Item[1], Item[2], Item[3]))
+
+ NewCommentList = []
+ FormatCommentLn = -1
+ ReFormatComment = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
+ for CommentItem in DepexComment:
+ CommentContent = CommentItem[0]
+ if ReFormatComment.match(CommentContent) is not None:
+ FormatCommentLn = CommentItem[1] + 1
+ continue
+
+ if CommentItem[1] != FormatCommentLn:
+ NewCommentList.append(CommentContent)
+ else:
+ FormatCommentLn = CommentItem[1] + 1
+
+ if not InfSectionObject.SetDepex(DepexContent, KeyList = KeyList, CommentList = NewCommentList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[Depex]"),
+ File=FileName,
+ Line=LastItem[3])
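+
+# Editor's note: illustrative [Depex] section content this parser consumes,
+# not part of the original source (GUID names are hypothetical):
+#
+#   [Depex.X64]
+#     gSampleDependencyGuid AND    # tail comments are collected separately
+#     gOtherDependencyGuid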
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
new file mode 100755
index 00000000..2716f8d7
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
@@ -0,0 +1,368 @@
+## @file
+# This file contains the parser for [Guids], [Ppis], [Protocols] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfGuidPpiProtocolSectionParser
+'''
+##
+# Import Modules
+#
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library import GlobalData
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Library.ParserValidate import IsValidIdString
+from Library.ParserValidate import IsValidUserId
+from Library.ParserValidate import IsValidArch
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
+ ## InfGuidParser
+ #
+ #
+ def InfGuidParser(self, SectionString, InfSectionObject, FileName):
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ ValueList = []
+ GuidList = []
+ CommentsList = []
+ CurrentLineVar = None
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+
+ if LineContent.strip() == '':
+ CommentsList = []
+ continue
+
+ if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ CommentsList.append(Line)
+ continue
+ else:
+ #
+ # Encounter a GUID entry
+ #
+ if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ CommentsList.append((
+ LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):],
+ LineNo))
+ LineContent = \
+ LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+
+ if LineContent != '':
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((LineContent, LineNo),
+ FileName,
+ DT.MODEL_EFI_GUID,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ CommentsList = []
+ ValueList = []
+ continue
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo),
+ self.FileLocalMacros, SectionMacros, True)
+ for Value in ValueList]
+
+ CurrentLineVar = (LineContent, LineNo, FileName)
+
+
+ if len(ValueList) >= 1:
+ GuidList.append((ValueList, CommentsList, CurrentLineVar))
+ CommentsList = []
+ ValueList = []
+ continue
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ LineIndex = -1
+ for Item in self.LastSectionHeaderContent:
+ LineIndex = Item[3]
+ if Item[1] not in ArchList:
+ ArchList.append(Item[1])
+
+ if not InfSectionObject.SetGuid(GuidList, Arch=ArchList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Guid]"),
+ File=FileName,
+ Line=LineIndex)
+
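+ # Editor's note: illustrative [Guids] entries for InfGuidParser above,
+ # not part of the original source (GUID names are hypothetical). Header
+ # comments travel with each entry in CommentsList:
+ #
+ #   ## SOMETIMES_CONSUMES
+ #   gSampleTokenSpaceGuid
+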
+ ## InfPpiParser
+ #
+ #
+ def InfPpiParser(self, SectionString, InfSectionObject, FileName):
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ ValueList = []
+ PpiList = []
+ CommentsList = []
+ CurrentLineVar = None
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+
+ if LineContent.strip() == '':
+ CommentsList = []
+ continue
+
+ if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ CommentsList.append(Line)
+ continue
+ else:
+ #
+ # Encounter a PPI entry
+ #
+ if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ CommentsList.append((
+ LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):],
+ LineNo))
+ LineContent = \
+ LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+
+ if LineContent != '':
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((LineContent, LineNo),
+ FileName,
+ DT.MODEL_EFI_PPI,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ ValueList = []
+ CommentsList = []
+ continue
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo), self.FileLocalMacros, SectionMacros)
+ for Value in ValueList]
+
+ CurrentLineVar = (LineContent, LineNo, FileName)
+
+ if len(ValueList) >= 1:
+ PpiList.append((ValueList, CommentsList, CurrentLineVar))
+ ValueList = []
+ CommentsList = []
+ continue
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ LineIndex = -1
+ for Item in self.LastSectionHeaderContent:
+ LineIndex = Item[3]
+ if Item[1] not in ArchList:
+ ArchList.append(Item[1])
+
+ if not InfSectionObject.SetPpi(PpiList, Arch=ArchList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Ppis]"),
+ File=FileName,
+ Line=LineIndex)
+
+ ## InfUserExtensionParser
+ #
+ #
+ def InfUserExtensionParser(self, SectionString, InfSectionObject, FileName):
+
+ UserExtensionContent = ''
+
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ LineContent = Line[0]
+
+# The check below is commented out to support a UserExtensions section that
+# contains no statements, only the section header in []
+# if LineContent.strip() == '':
+# continue
+
+ UserExtensionContent += LineContent + DT.END_OF_LINE
+ continue
+
+ #
+ # Current section UserId, IdString
+ #
+ IdContentList = []
+ LastItem = ''
+ SectionLineNo = None
+ for Item in self.LastSectionHeaderContent:
+ UserId = Item[1]
+ IdString = Item[2]
+ Arch = Item[3]
+ SectionLineNo = Item[4]
+ if not IsValidArch(Arch):
+ Logger.Error(
+ 'InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (Arch),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=SectionLineNo,
+ ExtraData=None)
+
+ if (UserId, IdString, Arch) not in IdContentList:
+ #
+ # To check the UserId and IdString valid or not.
+ #
+ if not IsValidUserId(UserId):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UE_SECTION_USER_ID_ERROR % (Item[1]),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=SectionLineNo,
+ ExtraData=None)
+
+ if not IsValidIdString(IdString):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UE_SECTION_ID_STRING_ERROR % (IdString),
+ File=GlobalData.gINF_MODULE_NAME, Line=SectionLineNo,
+ ExtraData=None)
+ IdContentList.append((UserId, IdString, Arch))
+ else:
+ #
+ # Each UserExtensions section header must have a unique set
+ # of UserId, IdString and Arch values.
+ # This means that the same UserId can be used in more than one
+ # section header, provided the IdString or Arch values are
+ # different. The same IdString values can be used in more than
+ # one section header if the UserId or Arch values are
+ # different. The same UserId and the same IdString can be used
+ # in a section header if the Arch values are different in each
+ # of the section headers.
+ #
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR % (
+ IdString),
+ File=GlobalData.gINF_MODULE_NAME,
+ Line=SectionLineNo,
+ ExtraData=None)
+ LastItem = Item
+
+ if not InfSectionObject.SetUserExtension(UserExtensionContent,
+ IdContent=IdContentList,
+ LineNo=SectionLineNo):
+ Logger.Error\
+ ('InfParser', FORMAT_INVALID, \
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[UserExtension]"), \
+ File=FileName, Line=LastItem[4])
+
+ def InfProtocolParser(self, SectionString, InfSectionObject, FileName):
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ ValueList = []
+ ProtocolList = []
+ CommentsList = []
+ CurrentLineVar = None
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+
+ if LineContent.strip() == '':
+ CommentsList = []
+ continue
+
+ if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ CommentsList.append(Line)
+ continue
+ else:
+ #
+ # Encounter a Protocol entry
+ #
+ if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ CommentsList.append((
+ LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):],
+ LineNo))
+ LineContent = \
+ LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
+
+ if LineContent != '':
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((LineContent, LineNo),
+ FileName,
+ DT.MODEL_EFI_PROTOCOL,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ ValueList = []
+ CommentsList = []
+ continue
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo), self.FileLocalMacros, SectionMacros)
+ for Value in ValueList]
+
+ CurrentLineVar = (LineContent, LineNo, FileName)
+
+ if len(ValueList) >= 1:
+ ProtocolList.append((ValueList, CommentsList, CurrentLineVar))
+ ValueList = []
+ CommentsList = []
+ continue
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ LineIndex = -1
+ for Item in self.LastSectionHeaderContent:
+ LineIndex = Item[3]
+ if Item[1] not in ArchList:
+ ArchList.append(Item[1])
+
+ if not InfSectionObject.SetProtocol(ProtocolList, Arch=ArchList):
+ Logger.Error\
+ ('InfParser', FORMAT_INVALID, \
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Protocol]"), \
+ File=FileName, Line=LineIndex)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
new file mode 100755
index 00000000..66fae2af
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
@@ -0,0 +1,197 @@
+## @file
+# This file contains the parser for [Libraries] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfLibrarySectionParser
+'''
+##
+# Import Modules
+#
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Library import GlobalData
+from Parser.InfParserMisc import IsLibInstanceInfo
+from Parser.InfAsBuiltProcess import GetLibInstanceInfo
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfLibrarySectionParser(InfParserSectionRoot):
+ ## InfLibraryParser
+ #
+ #
+ def InfLibraryParser(self, SectionString, InfSectionObject, FileName):
+ #
+ # For Common INF file
+ #
+ if not GlobalData.gIS_BINARY_INF:
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ ValueList = []
+ LibraryList = []
+ LibStillCommentFalg = False
+ LibHeaderComments = []
+ LibLineComment = None
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ LibLineContent = Line[0]
+ LibLineNo = Line[1]
+
+ if LibLineContent.strip() == '':
+ continue
+
+ #
+ # Found Header Comments
+ #
+ if LibLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ #
+ # The previous line was a comment and this line continues it.
+ #
+ if LibStillCommentFalg:
+ LibHeaderComments.append(Line)
+ continue
+ #
+ # First comment line encountered
+ #
+ else:
+ #
+ # Clear original data
+ #
+ LibHeaderComments = []
+ LibHeaderComments.append(Line)
+ LibStillCommentFalg = True
+ continue
+ else:
+ LibStillCommentFalg = False
+
+ if len(LibHeaderComments) >= 1:
+ LibLineComment = InfLineCommentObject()
+ LineCommentContent = ''
+ for Item in LibHeaderComments:
+ LineCommentContent += Item[0] + DT.END_OF_LINE
+ LibLineComment.SetHeaderComments(LineCommentContent)
+
+ #
+ # Find Tail comment.
+ #
+ if LibLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ LibTailComments = LibLineContent[LibLineContent.find(DT.TAB_COMMENT_SPLIT):]
+ LibLineContent = LibLineContent[:LibLineContent.find(DT.TAB_COMMENT_SPLIT)]
+ if LibLineComment is None:
+ LibLineComment = InfLineCommentObject()
+ LibLineComment.SetTailComments(LibTailComments)
+
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((LibLineContent, LibLineNo),
+ FileName,
+ DT.MODEL_EFI_LIBRARY_CLASS,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ LibLineComment = None
+ LibHeaderComments = []
+ continue
+
+ TokenList = GetSplitValueList(LibLineContent, DT.TAB_VALUE_SPLIT, 1)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, LibLineContent, LibLineNo),
+ self.FileLocalMacros, SectionMacros, True)
+ for Value in ValueList]
+
+ LibraryList.append((ValueList, LibLineComment,
+ (LibLineContent, LibLineNo, FileName)))
+ ValueList = []
+ LibLineComment = None
+ LibTailComments = ''
+ LibHeaderComments = []
+
+ continue
+
+ #
+ # Current section archs
+ #
+ KeyList = []
+ for Item in self.LastSectionHeaderContent:
+ if (Item[1], Item[2]) not in KeyList:
+ KeyList.append((Item[1], Item[2]))
+
+ if not InfSectionObject.SetLibraryClasses(LibraryList, KeyList=KeyList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Library]"),
+ File=FileName,
+ Line=Item[3])
+ #
+ # For Binary INF
+ #
+ else:
+ self.InfAsBuiltLibraryParser(SectionString, InfSectionObject, FileName)
+
+ def InfAsBuiltLibraryParser(self, SectionString, InfSectionObject, FileName):
+ LibraryList = []
+ LibInsFlag = False
+ for Line in SectionString:
+ LineContent = Line[0]
+ LineNo = Line[1]
+
+ if LineContent.strip() == '':
+ LibInsFlag = False
+ continue
+
+ if not LineContent.strip().startswith("#"):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_LIB_CONTATIN_ASBUILD_AND_COMMON,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=LineContent)
+
+ if IsLibInstanceInfo(LineContent):
+ LibInsFlag = True
+ continue
+
+ if LibInsFlag:
+ LibGuid, LibVer = GetLibInstanceInfo(LineContent, GlobalData.gWORKSPACE, LineNo, FileName)
+ #
+ # If the VERSION_STRING is missing from the INF file, the tool defaults to "0".
+ #
+ if LibVer == '':
+ LibVer = '0'
+ if LibGuid != '':
+ if (LibGuid, LibVer) not in LibraryList:
+ LibraryList.append((LibGuid, LibVer))
+
+ #
+ # Current section archs
+ #
+ KeyList = []
+ Item = ['', '', '']
+ for Item in self.LastSectionHeaderContent:
+ if (Item[1], Item[2]) not in KeyList:
+ KeyList.append((Item[1], Item[2]))
+
+ if not InfSectionObject.SetLibraryClasses(LibraryList, KeyList=KeyList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Library]"),
+ File=FileName,
+ Line=Item[3])
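+
+# Editor's note: illustrative AsBuilt [Libraries] content, not part of the
+# original source. The exact marker recognized by IsLibInstanceInfo is
+# defined in InfParserMisc; the path below is hypothetical:
+#
+#   [Libraries.X64]
+#   ## @LIB_INSTANCES
+#   #  SamplePkg/Library/SampleLib/SampleLib.inf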
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
new file mode 100755
index 00000000..bb6777e8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
@@ -0,0 +1,134 @@
+## @file
+# This file contains the parser for [Packages] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfPackageSectionParser
+'''
+##
+# Import Modules
+#
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfPackageSectionParser(InfParserSectionRoot):
+ ## InfPackageParser
+ #
+ #
+ def InfPackageParser(self, SectionString, InfSectionObject, FileName):
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ ValueList = []
+ PackageList = []
+ StillCommentFalg = False
+ HeaderComments = []
+ LineComment = None
+ #
+ # Parse section content
+ #
+ for Line in SectionString:
+ PkgLineContent = Line[0]
+ PkgLineNo = Line[1]
+
+ if PkgLineContent.strip() == '':
+ continue
+
+ #
+ # Find Header Comments
+ #
+ if PkgLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ #
+ # The previous line was a comment and this line continues it.
+ #
+ if StillCommentFalg:
+ HeaderComments.append(Line)
+ continue
+ #
+ # First comment line encountered
+ #
+ else:
+ #
+ # Clear original data
+ #
+ HeaderComments = []
+ HeaderComments.append(Line)
+ StillCommentFalg = True
+ continue
+ else:
+ StillCommentFalg = False
+
+ if len(HeaderComments) >= 1:
+ LineComment = InfLineCommentObject()
+ LineCommentContent = ''
+ for Item in HeaderComments:
+ LineCommentContent += Item[0] + DT.END_OF_LINE
+ LineComment.SetHeaderComments(LineCommentContent)
+
+ #
+ # Find Tail comment.
+ #
+ if PkgLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ TailComments = PkgLineContent[PkgLineContent.find(DT.TAB_COMMENT_SPLIT):]
+ PkgLineContent = PkgLineContent[:PkgLineContent.find(DT.TAB_COMMENT_SPLIT)]
+ if LineComment is None:
+ LineComment = InfLineCommentObject()
+ LineComment.SetTailComments(TailComments)
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((PkgLineContent, PkgLineNo),
+ FileName,
+ DT.MODEL_META_DATA_PACKAGE,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ LineComment = None
+ HeaderComments = []
+ continue
+
+ TokenList = GetSplitValueList(PkgLineContent, DT.TAB_VALUE_SPLIT, 1)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, PkgLineContent, PkgLineNo),
+ self.FileLocalMacros, SectionMacros, True)
+ for Value in ValueList]
+
+ PackageList.append((ValueList, LineComment,
+ (PkgLineContent, PkgLineNo, FileName)))
+ ValueList = []
+ LineComment = None
+ TailComments = ''
+ HeaderComments = []
+ continue
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ for Item in self.LastSectionHeaderContent:
+ if Item[1] not in ArchList:
+ ArchList.append(Item[1])
+
+ if not InfSectionObject.SetPackages(PackageList, Arch = ArchList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR\
+ %("[Packages]"),
+ File=FileName,
+ Line=Item[3])
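+
+# Editor's note: illustrative [Packages] entries this parser consumes, not
+# part of the original source (package names are hypothetical):
+#
+#   [Packages.common]
+#     SamplePkg/SamplePkg.dec      # tail comments are attached to the entry
+#     OtherPkg/OtherPkg.dec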
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParser.py
new file mode 100755
index 00000000..711b5e85
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParser.py
@@ -0,0 +1,680 @@
+## @file
+# This file contains the parser for INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+InfParser
+'''
+
+##
+# Import Modules
+#
+import re
+import os
+from copy import deepcopy
+
+from Library.StringUtils import GetSplitValueList
+from Library.StringUtils import ConvertSpecialChar
+from Library.Misc import ProcessLineExtender
+from Library.Misc import ProcessEdkComment
+from Library.Parsing import NormPath
+from Library.ParserValidate import IsValidInfMoudleTypeList
+from Library.ParserValidate import IsValidArch
+from Library import DataType as DT
+from Library import GlobalData
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Logger.ToolError import FILE_READ_FAILURE
+from Logger.ToolError import PARSER_ERROR
+
+from Object.Parser.InfCommonObject import InfSectionCommonDef
+from Parser.InfSectionParser import InfSectionParser
+from Parser.InfParserMisc import gINF_SECTION_DEF
+from Parser.InfParserMisc import IsBinaryInf
+
+## OpenInfFile
+#
+#
+def OpenInfFile(Filename):
+ FileLinesList = []
+
+ try:
+ FInputfile = open(Filename, "r")
+ try:
+ FileLinesList = FInputfile.readlines()
+ except BaseException:
+ Logger.Error("InfParser",
+ FILE_READ_FAILURE,
+ ST.ERR_FILE_OPEN_FAILURE,
+ File=Filename)
+ finally:
+ FInputfile.close()
+ except BaseException:
+ Logger.Error("InfParser",
+ FILE_READ_FAILURE,
+ ST.ERR_FILE_OPEN_FAILURE,
+ File=Filename)
+
+ return FileLinesList
+
+## InfParser
+#
+# This class defines the structure used in an InfParser object
+#
+# @param InfObject: Inherited from InfSectionParser class
+# @param Filename: Input value for Filename of INF file, default is
+# None
+# @param WorkspaceDir: Input value for current workspace directory,
+# default is None
+#
+class InfParser(InfSectionParser):
+
+ def __init__(self, Filename = None, WorkspaceDir = None):
+
+ #
+        # Call the parent class constructor
+        #
+        InfSectionParser.__init__(self)
+
+ self.WorkspaceDir = WorkspaceDir
+ self.SupArchList = DT.ARCH_LIST
+ self.EventList = []
+ self.HobList = []
+ self.BootModeList = []
+
+ #
+ # Load Inf file if filename is not None
+ #
+ if Filename is not None:
+ self.ParseInfFile(Filename)
+
+ ## Parse INF file
+ #
+ # Parse the file if it exists
+ #
+ # @param Filename: Input value for filename of INF file
+ #
+ def ParseInfFile(self, Filename):
+
+ Filename = NormPath(Filename)
+ (Path, Name) = os.path.split(Filename)
+ self.FullPath = Filename
+ self.RelaPath = Path
+ self.FileName = Name
+ GlobalData.gINF_MODULE_DIR = Path
+ GlobalData.gINF_MODULE_NAME = self.FullPath
+ GlobalData.gIS_BINARY_INF = False
+ #
+ # Initialize common data
+ #
+ LineNo = 0
+ CurrentSection = DT.MODEL_UNKNOWN
+ SectionLines = []
+
+ #
+ # Flags
+ #
+ HeaderCommentStart = False
+ HeaderCommentEnd = False
+ HeaderStarLineNo = -1
+ BinaryHeaderCommentStart = False
+ BinaryHeaderCommentEnd = False
+ BinaryHeaderStarLineNo = -1
+
+ #
+        # When a section ends, parse the whole section contents.
+ #
+ NewSectionStartFlag = False
+ FirstSectionStartFlag = False
+
+ #
+ # Parse file content
+ #
+ CommentBlock = []
+
+ #
+ # Variables for Event/Hob/BootMode
+ #
+ self.EventList = []
+ self.HobList = []
+ self.BootModeList = []
+ SectionType = ''
+
+ FileLinesList = OpenInfFile (Filename)
+
+ #
+        # One INF file can only have one [Defines] section.
+ #
+ DefineSectionParsedFlag = False
+
+ #
+ # Convert special characters in lines to space character.
+ #
+ FileLinesList = ConvertSpecialChar(FileLinesList)
+
+ #
+ # Process Line Extender
+ #
+ FileLinesList = ProcessLineExtender(FileLinesList)
+
+ #
+        # Process EDK1-style INF comments if found
+ #
+ OrigLines = [Line for Line in FileLinesList]
+ FileLinesList, EdkCommentStartPos = ProcessEdkComment(FileLinesList)
+
+ #
+        # Determine whether the INF file is a binary INF or not
+ #
+ if IsBinaryInf(FileLinesList):
+ GlobalData.gIS_BINARY_INF = True
+
+ InfSectionCommonDefObj = None
+
+ for Line in FileLinesList:
+ LineNo = LineNo + 1
+ Line = Line.strip()
+ if (LineNo < len(FileLinesList) - 1):
+ NextLine = FileLinesList[LineNo].strip()
+
+ #
+ # blank line
+ #
+ if (Line == '' or not Line) and LineNo == len(FileLinesList):
+ LastSectionFalg = True
+
+ #
+ # check whether file header comment section started
+ #
+ if Line.startswith(DT.TAB_SPECIAL_COMMENT) and \
+ (Line.find(DT.TAB_HEADER_COMMENT) > -1) and \
+ not HeaderCommentStart and not HeaderCommentEnd:
+
+ CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
+ #
+ # Append the first line to section lines.
+ #
+ HeaderStarLineNo = LineNo
+ SectionLines.append((Line, LineNo))
+ HeaderCommentStart = True
+ continue
+
+ #
+ # Collect Header content.
+ #
+ if (Line.startswith(DT.TAB_COMMENT_SPLIT) and CurrentSection == DT.MODEL_META_DATA_FILE_HEADER) and\
+ HeaderCommentStart and not Line.startswith(DT.TAB_SPECIAL_COMMENT) and not\
+ HeaderCommentEnd and NextLine != '':
+ SectionLines.append((Line, LineNo))
+ continue
+ #
+ # Header content end
+ #
+ if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith("#")) and HeaderCommentStart \
+ and not HeaderCommentEnd:
+ HeaderCommentEnd = True
+ BinaryHeaderCommentStart = False
+ BinaryHeaderCommentEnd = False
+ HeaderCommentStart = False
+ if Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1:
+ self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
+ SectionLines = []
+ else:
+ SectionLines.append((Line, LineNo))
+ #
+ # Call Header comment parser.
+ #
+ self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
+ SectionLines = []
+ continue
+
+ #
+ # check whether binary header comment section started
+ #
+ if Line.startswith(DT.TAB_SPECIAL_COMMENT) and \
+ (Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1) and \
+ not BinaryHeaderCommentStart:
+ SectionLines = []
+ CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
+ #
+ # Append the first line to section lines.
+ #
+ BinaryHeaderStarLineNo = LineNo
+ SectionLines.append((Line, LineNo))
+ BinaryHeaderCommentStart = True
+ HeaderCommentEnd = True
+ continue
+
+ #
+            # check whether more than one binary header exists
+ #
+ if Line.startswith(DT.TAB_SPECIAL_COMMENT) and BinaryHeaderCommentStart and \
+ not BinaryHeaderCommentEnd and (Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1):
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_MULTIPLE_BINARYHEADER_EXIST,
+ File=Filename)
+
+ #
+ # Collect Binary Header content.
+ #
+ if (Line.startswith(DT.TAB_COMMENT_SPLIT) and CurrentSection == DT.MODEL_META_DATA_FILE_HEADER) and\
+ BinaryHeaderCommentStart and not Line.startswith(DT.TAB_SPECIAL_COMMENT) and not\
+ BinaryHeaderCommentEnd and NextLine != '':
+ SectionLines.append((Line, LineNo))
+ continue
+ #
+ # Binary Header content end
+ #
+ if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith(DT.TAB_COMMENT_SPLIT)) and \
+ BinaryHeaderCommentStart and not BinaryHeaderCommentEnd:
+ SectionLines.append((Line, LineNo))
+ BinaryHeaderCommentStart = False
+ #
+ # Call Binary Header comment parser.
+ #
+ self.InfHeaderParser(SectionLines, self.InfBinaryHeader, self.FileName, True)
+ SectionLines = []
+ BinaryHeaderCommentEnd = True
+ continue
+ #
+ # Find a new section tab
+ # Or at the last line of INF file,
+ # need to process the last section.
+ #
+ LastSectionFalg = False
+ if LineNo == len(FileLinesList):
+ LastSectionFalg = True
+
+ if Line.startswith(DT.TAB_COMMENT_SPLIT) and not Line.startswith(DT.TAB_SPECIAL_COMMENT):
+ SectionLines.append((Line, LineNo))
+ if not LastSectionFalg:
+ continue
+
+ #
+            # Encountered a section header: starts with '[' and ends with ']'
+ #
+ if (Line.startswith(DT.TAB_SECTION_START) and \
+ Line.find(DT.TAB_SECTION_END) > -1) or LastSectionFalg:
+
+ HeaderCommentEnd = True
+ BinaryHeaderCommentEnd = True
+
+ if not LastSectionFalg:
+ #
+ # check to prevent '#' inside section header
+ #
+ HeaderContent = Line[1:Line.find(DT.TAB_SECTION_END)]
+ if HeaderContent.find(DT.TAB_COMMENT_SPLIT) != -1:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID,
+ File=self.FullPath,
+ Line=LineNo,
+ ExtraData=Line)
+
+ #
+                # Keep the previous section header content for the
+                # section parser's use.
+ #
+ self.LastSectionHeaderContent = deepcopy(self.SectionHeaderContent)
+
+ #
+                # Tail comments on the section definition line.
+ #
+ TailComments = ''
+ CommentIndex = Line.find(DT.TAB_COMMENT_SPLIT)
+ if CommentIndex > -1:
+ TailComments = Line[CommentIndex:]
+ Line = Line[:CommentIndex]
+
+ InfSectionCommonDefObj = InfSectionCommonDef()
+ if TailComments != '':
+ InfSectionCommonDefObj.SetTailComments(TailComments)
+                if CommentBlock:
+ InfSectionCommonDefObj.SetHeaderComments(CommentBlock)
+ CommentBlock = []
+ #
+            # Call the section parser before the section header parser to avoid choking on EDK1 INF files
+ #
+ if CurrentSection == DT.MODEL_META_DATA_DEFINE:
+ DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
+ DefineSectionParsedFlag, SectionLines,
+ InfSectionCommonDefObj, LineNo)
+ #
+ # Compare the new section name with current
+ #
+ self.SectionHeaderParser(Line, self.FileName, LineNo)
+
+ self._CheckSectionHeaders(Line, LineNo)
+
+ SectionType = _ConvertSecNameToType(self.SectionHeaderContent[0][0])
+
+ if not FirstSectionStartFlag:
+ CurrentSection = SectionType
+ FirstSectionStartFlag = True
+ else:
+ NewSectionStartFlag = True
+ else:
+ SectionLines.append((Line, LineNo))
+ continue
+
+ if LastSectionFalg:
+ SectionLines, CurrentSection = self._ProcessLastSection(SectionLines, Line, LineNo, CurrentSection)
+
+ #
+ # End of section content collect.
+ # Parser the section content collected previously.
+ #
+ if NewSectionStartFlag or LastSectionFalg:
+ if CurrentSection != DT.MODEL_META_DATA_DEFINE or \
+ (LastSectionFalg and CurrentSection == DT.MODEL_META_DATA_DEFINE):
+ DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
+ DefineSectionParsedFlag, SectionLines,
+ InfSectionCommonDefObj, LineNo)
+
+ CurrentSection = SectionType
+ #
+ # Clear section lines
+ #
+ SectionLines = []
+
+ if HeaderStarLineNo == -1:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_NO_SOURCE_HEADER,
+ File=self.FullPath)
+ if BinaryHeaderStarLineNo > -1 and HeaderStarLineNo > -1 and HeaderStarLineNo > BinaryHeaderStarLineNo:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_BINARY_HEADER_ORDER,
+ File=self.FullPath)
+ #
+ # EDKII INF should not have EDKI style comment
+ #
+ if EdkCommentStartPos != -1:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII,
+ File=self.FullPath,
+ Line=EdkCommentStartPos + 1,
+ ExtraData=OrigLines[EdkCommentStartPos])
+
+ #
+ # extract [Event] [Hob] [BootMode] sections
+ #
+ self._ExtractEventHobBootMod(FileLinesList)
+
+ ## _CheckSectionHeaders
+ #
+ #
+ def _CheckSectionHeaders(self, Line, LineNo):
+ if len(self.SectionHeaderContent) == 0:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID,
+ File=self.FullPath,
+ Line=LineNo, ExtraData=Line)
+ else:
+ for SectionItem in self.SectionHeaderContent:
+ ArchList = []
+ #
+                # Depex/UserExtensions section headers are not covered
+                # by this check.
+ #
+ if SectionItem[0].strip().upper() == DT.TAB_INF_FIXED_PCD.upper() or \
+ SectionItem[0].strip().upper() == DT.TAB_INF_PATCH_PCD.upper() or \
+ SectionItem[0].strip().upper() == DT.TAB_INF_PCD_EX.upper() or \
+ SectionItem[0].strip().upper() == DT.TAB_INF_PCD.upper() or \
+ SectionItem[0].strip().upper() == DT.TAB_INF_FEATURE_PCD.upper():
+ ArchList = GetSplitValueList(SectionItem[1].strip(), ' ')
+ else:
+ ArchList = [SectionItem[1].strip()]
+
+ for Arch in ArchList:
+ if (not IsValidArch(Arch)) and \
+ (SectionItem[0].strip().upper() != DT.TAB_DEPEX.upper()) and \
+ (SectionItem[0].strip().upper() != DT.TAB_USER_EXTENSIONS.upper()) and \
+ (SectionItem[0].strip().upper() != DT.TAB_COMMON_DEFINES.upper()):
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[1]),
+ File=self.FullPath,
+ Line=LineNo, ExtraData=Line)
+ #
+ # Check if the ModuleType is valid
+ #
+ ChkModSectionList = ['LIBRARYCLASSES']
+ if (self.SectionHeaderContent[0][0].upper() in ChkModSectionList):
+ if SectionItem[2].strip().upper():
+ MoudleTypeList = GetSplitValueList(
+ SectionItem[2].strip().upper())
+ if (not IsValidInfMoudleTypeList(MoudleTypeList)):
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[2]),
+ File=self.FullPath, Line=LineNo,
+ ExtraData=Line)
+
+ ## _CallSectionParsers
+ #
+ #
+ def _CallSectionParsers(self, CurrentSection, DefineSectionParsedFlag,
+ SectionLines, InfSectionCommonDefObj, LineNo):
+ if CurrentSection == DT.MODEL_META_DATA_DEFINE:
+ if not DefineSectionParsedFlag:
+ self.InfDefineParser(SectionLines,
+ self.InfDefSection,
+ self.FullPath,
+ InfSectionCommonDefObj)
+ DefineSectionParsedFlag = True
+ else:
+ Logger.Error("Parser",
+ PARSER_ERROR,
+ ST.ERR_INF_PARSER_MULTI_DEFINE_SECTION,
+ File=self.FullPath,
+ RaiseError = Logger.IS_RAISE_ERROR)
+
+ elif CurrentSection == DT.MODEL_META_DATA_BUILD_OPTION:
+ self.InfBuildOptionParser(SectionLines,
+ self.InfBuildOptionSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_LIBRARY_CLASS:
+ self.InfLibraryParser(SectionLines,
+ self.InfLibraryClassSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_META_DATA_PACKAGE:
+ self.InfPackageParser(SectionLines,
+ self.InfPackageSection,
+ self.FullPath)
+ #
+ # [Pcd] Sections, put it together
+ #
+ elif CurrentSection == DT.MODEL_PCD_FIXED_AT_BUILD or \
+ CurrentSection == DT.MODEL_PCD_PATCHABLE_IN_MODULE or \
+ CurrentSection == DT.MODEL_PCD_FEATURE_FLAG or \
+ CurrentSection == DT.MODEL_PCD_DYNAMIC_EX or \
+ CurrentSection == DT.MODEL_PCD_DYNAMIC:
+ self.InfPcdParser(SectionLines,
+ self.InfPcdSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_SOURCE_FILE:
+ self.InfSourceParser(SectionLines,
+ self.InfSourcesSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_META_DATA_USER_EXTENSION:
+ self.InfUserExtensionParser(SectionLines,
+ self.InfUserExtensionSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_PROTOCOL:
+ self.InfProtocolParser(SectionLines,
+ self.InfProtocolSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_PPI:
+ self.InfPpiParser(SectionLines,
+ self.InfPpiSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_GUID:
+ self.InfGuidParser(SectionLines,
+ self.InfGuidSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_DEPEX:
+ self.InfDepexParser(SectionLines,
+ self.InfDepexSection,
+ self.FullPath)
+
+ elif CurrentSection == DT.MODEL_EFI_BINARY_FILE:
+ self.InfBinaryParser(SectionLines,
+ self.InfBinariesSection,
+ self.FullPath)
+ #
+ # Unknown section type found, raise error.
+ #
+ else:
+ if len(self.SectionHeaderContent) >= 1:
+ Logger.Error("Parser",
+ PARSER_ERROR,
+ ST.ERR_INF_PARSER_UNKNOWN_SECTION,
+ File=self.FullPath, Line=LineNo,
+ RaiseError = Logger.IS_RAISE_ERROR)
+ else:
+ Logger.Error("Parser",
+ PARSER_ERROR,
+ ST.ERR_INF_PARSER_NO_SECTION_ERROR,
+ File=self.FullPath, Line=LineNo,
+ RaiseError = Logger.IS_RAISE_ERROR)
+
+ return DefineSectionParsedFlag
+
+ def _ExtractEventHobBootMod(self, FileLinesList):
+ SpecialSectionStart = False
+ CheckLocation = False
+ GFindSpecialCommentRe = \
+ re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
+ GFindNewSectionRe2 = \
+ re.compile(r"""#?(\s*)\[(.*?)\](.*)""", re.DOTALL)
+ LineNum = 0
+ Element = []
+ for Line in FileLinesList:
+ Line = Line.strip()
+ LineNum += 1
+ MatchObject = GFindSpecialCommentRe.search(Line)
+ if MatchObject:
+ SpecialSectionStart = True
+ Element = []
+ if MatchObject.group(1).upper().startswith("EVENT"):
+ List = self.EventList
+ elif MatchObject.group(1).upper().startswith("HOB"):
+ List = self.HobList
+ elif MatchObject.group(1).upper().startswith("BOOTMODE"):
+ List = self.BootModeList
+ else:
+ SpecialSectionStart = False
+ CheckLocation = False
+ if SpecialSectionStart:
+ Element.append([Line, LineNum])
+ List.append(Element)
+ else:
+ #
+ # if currently in special section, try to detect end of current section
+ #
+ MatchObject = GFindNewSectionRe2.search(Line)
+ if SpecialSectionStart:
+ if MatchObject:
+ SpecialSectionStart = False
+ CheckLocation = False
+ Element = []
+ elif not Line:
+ SpecialSectionStart = False
+ CheckLocation = True
+ Element = []
+ else:
+ if not Line.startswith(DT.TAB_COMMENT_SPLIT):
+ Logger.Warn("Parser",
+ ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
+ File=self.FullPath, Line=LineNum)
+ SpecialSectionStart = False
+ CheckLocation = False
+ Element = []
+ else:
+ Element.append([Line, LineNum])
+ else:
+ if CheckLocation:
+ if MatchObject:
+ CheckLocation = False
+ elif Line:
+ Logger.Warn("Parser",
+ ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
+ File=self.FullPath, Line=LineNum)
+ CheckLocation = False
+
+ if len(self.BootModeList) >= 1:
+ self.InfSpecialCommentParser(self.BootModeList,
+ self.InfSpecialCommentSection,
+ self.FileName,
+ DT.TYPE_BOOTMODE_SECTION)
+
+ if len(self.EventList) >= 1:
+ self.InfSpecialCommentParser(self.EventList,
+ self.InfSpecialCommentSection,
+ self.FileName,
+ DT.TYPE_EVENT_SECTION)
+
+ if len(self.HobList) >= 1:
+ self.InfSpecialCommentParser(self.HobList,
+ self.InfSpecialCommentSection,
+ self.FileName,
+ DT.TYPE_HOB_SECTION)
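
A marker sketch (hypothetical comment content, not part of the patch) for the
regular expression this scan keys on:

    GFindSpecialCommentRe.search('# [Event.EVENT_GROUP_EXIT_BOOT_SERVICES]').group(1)
    # -> 'Event.EVENT_GROUP_EXIT_BOOT_SERVICES'; its upper() starts with
    #    'EVENT', so the following '#' lines accumulate into self.EventList.
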
+ ## _ProcessLastSection
+ #
+ #
+ def _ProcessLastSection(self, SectionLines, Line, LineNo, CurrentSection):
+ #
+        # If the last line is a section header, discard it.
+ #
+ if not (Line.startswith(DT.TAB_SECTION_START) and Line.find(DT.TAB_SECTION_END) > -1):
+ SectionLines.append((Line, LineNo))
+
+ if len(self.SectionHeaderContent) >= 1:
+ TemSectionName = self.SectionHeaderContent[0][0].upper()
+ if TemSectionName.upper() not in gINF_SECTION_DEF.keys():
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UNKNOWN_SECTION,
+ File=self.FullPath,
+ Line=LineNo,
+ ExtraData=Line,
+ RaiseError = Logger.IS_RAISE_ERROR
+ )
+ else:
+ CurrentSection = gINF_SECTION_DEF[TemSectionName]
+ self.LastSectionHeaderContent = self.SectionHeaderContent
+
+ return SectionLines, CurrentSection
+
+## _ConvertSecNameToType
+#
+#
+def _ConvertSecNameToType(SectionName):
+ SectionType = ''
+ if SectionName.upper() not in gINF_SECTION_DEF.keys():
+ SectionType = DT.MODEL_UNKNOWN
+ else:
+ SectionType = gINF_SECTION_DEF[SectionName.upper()]
+
+ return SectionType
+
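A module-level usage sketch (hypothetical paths, not part of the patch):
constructing InfParser drives the full parse, after which the section objects
inherited from InfSectionParser hold the results:

    Parser = InfParser('MdeModulePkg/Universal/Foo/Foo.inf',
                       WorkspaceDir='/path/to/workspace')
    Sources = Parser.InfSourcesSection   # filled by InfSourceParser
    Pcds    = Parser.InfPcdSection       # filled by InfPcdParser
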
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
new file mode 100755
index 00000000..0a51ac28
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
@@ -0,0 +1,216 @@
+## @file
+# This file contains miscellaneous functions for the INF parser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+InfParserMisc
+'''
+
+##
+# Import Modules
+#
+import re
+
+
+from Library import DataType as DT
+
+
+from Library.StringUtils import gMACRO_PATTERN
+from Library.StringUtils import ReplaceMacro
+from Object.Parser.InfMisc import ErrorInInf
+from Logger.StringTable import ERR_MARCO_DEFINITION_MISS_ERROR
+
+#
+# Global variable
+#
+
+#
+# Sections can exist in INF file
+#
+gINF_SECTION_DEF = {
+ DT.TAB_UNKNOWN.upper() : DT.MODEL_UNKNOWN,
+ DT.TAB_HEADER.upper() : DT.MODEL_META_DATA_FILE_HEADER,
+ DT.TAB_INF_DEFINES.upper() : DT.MODEL_META_DATA_DEFINE,
+ DT.TAB_BUILD_OPTIONS.upper() : DT.MODEL_META_DATA_BUILD_OPTION,
+ DT.TAB_LIBRARY_CLASSES.upper() : DT.MODEL_EFI_LIBRARY_CLASS,
+ DT.TAB_PACKAGES.upper() : DT.MODEL_META_DATA_PACKAGE,
+ DT.TAB_INF_FIXED_PCD.upper() : DT.MODEL_PCD_FIXED_AT_BUILD,
+ DT.TAB_INF_PATCH_PCD.upper() : DT.MODEL_PCD_PATCHABLE_IN_MODULE,
+ DT.TAB_INF_FEATURE_PCD.upper() : DT.MODEL_PCD_FEATURE_FLAG,
+ DT.TAB_INF_PCD_EX.upper() : DT.MODEL_PCD_DYNAMIC_EX,
+ DT.TAB_INF_PCD.upper() : DT.MODEL_PCD_DYNAMIC,
+ DT.TAB_SOURCES.upper() : DT.MODEL_EFI_SOURCE_FILE,
+ DT.TAB_GUIDS.upper() : DT.MODEL_EFI_GUID,
+ DT.TAB_PROTOCOLS.upper() : DT.MODEL_EFI_PROTOCOL,
+ DT.TAB_PPIS.upper() : DT.MODEL_EFI_PPI,
+ DT.TAB_DEPEX.upper() : DT.MODEL_EFI_DEPEX,
+ DT.TAB_BINARIES.upper() : DT.MODEL_EFI_BINARY_FILE,
+ DT.TAB_USER_EXTENSIONS.upper() : DT.MODEL_META_DATA_USER_EXTENSION
+ #
+ # EDK1 section
+ # TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE
+ #
+ }
+
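For orientation (hypothetical lookups, not part of the patch): section names
are matched case-insensitively through this table, and the EDK1 NMAKE section
is deliberately absent:

    gINF_SECTION_DEF['Sources'.upper()]   # -> DT.MODEL_EFI_SOURCE_FILE
    'NMAKE' in gINF_SECTION_DEF           # -> False
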
+## InfExpandMacro
+#
+# Expand MACRO definitions with MACROs defined in the [Defines] section and in the specific section.
+# MACROs defined in the specific section have higher priority and are expanded first.
+#
+# @param LineInfo Contain information of FileName, LineContent, LineNo
+# @param GlobalMacros MACROs defined in INF [Defines] section
+# @param SectionMacros MACROs defined in INF specific section
+# @param Flag If set to True, skip macros inside quoted strings
+#
+def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Flag=False):
+ if GlobalMacros is None:
+ GlobalMacros = {}
+ if SectionMacros is None:
+ SectionMacros = {}
+
+ FileName = LineInfo[0]
+ LineContent = LineInfo[1]
+ LineNo = LineInfo[2]
+
+ # Don't expand macros in comments
+ if LineContent.strip().startswith("#"):
+ return Content
+
+ NewLineInfo = (FileName, LineNo, LineContent)
+
+ #
+    # First, replace MACROs with values defined in the specific section
+ #
+ Content = ReplaceMacro (Content,
+ SectionMacros,
+ False,
+ (LineContent, LineNo),
+ FileName,
+ Flag)
+ #
+    # Then replace MACROs with values defined in the [Defines] section
+ #
+ Content = ReplaceMacro (Content,
+ GlobalMacros,
+ False,
+ (LineContent, LineNo),
+ FileName,
+ Flag)
+
+ MacroUsed = gMACRO_PATTERN.findall(Content)
+ #
+ # no macro found in String, stop replacing
+ #
+ if len(MacroUsed) == 0:
+ return Content
+ else:
+ for Macro in MacroUsed:
+ gQuotedMacro = re.compile(".*\".*\$\(%s\).*\".*"%(Macro))
+ if not gQuotedMacro.match(Content):
+ #
+                # There are still MACROs that can't be expanded.
+ #
+ ErrorInInf (ERR_MARCO_DEFINITION_MISS_ERROR,
+ LineInfo=NewLineInfo)
+
+ return Content
+
+
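A priority sketch (hypothetical macro values, not part of the patch):
section-scope macros shadow [Defines]-scope ones:

    GlobalMacros  = {'ARCH': 'IA32'}   # as if defined in [Defines]
    SectionMacros = {'ARCH': 'X64'}    # as if defined in the current section
    InfExpandMacro('$(ARCH)/Cpu.c',
                   ('Mod.inf', '$(ARCH)/Cpu.c', 10),
                   GlobalMacros, SectionMacros)
    # -> 'X64/Cpu.c'
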
+## IsBinaryInf
+#
+# Determine whether the INF file is a binary INF or a common INF
+#
+# @param FileLineList A list containing all INF file content.
+#
+def IsBinaryInf(FileLineList):
+ if not FileLineList:
+ return False
+
+ ReIsSourcesSection = re.compile("^\s*\[Sources.*\]\s.*$", re.IGNORECASE)
+ ReIsBinarySection = re.compile("^\s*\[Binaries.*\]\s.*$", re.IGNORECASE)
+ BinarySectionFoundFlag = False
+
+ for Line in FileLineList:
+ if ReIsSourcesSection.match(Line):
+ return False
+ if ReIsBinarySection.match(Line):
+ BinarySectionFoundFlag = True
+
+ if BinarySectionFoundFlag:
+ return True
+
+ return False
+
+
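A classification sketch (hypothetical line lists, not part of the patch): a
[Sources] section anywhere forces the "common" answer, even when [Binaries]
is also present:

    IsBinaryInf(['[Binaries.IA32]\n', '  Foo.efi\n'])   # True
    IsBinaryInf(['[Sources]\n', '[Binaries]\n'])        # False
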
+## IsLibInstanceInfo
+#
+# Determine whether the string contains the ## @LIB_INSTANCES marker.
+#
+# @param String
+#
+# @return Flag
+#
+def IsLibInstanceInfo(String):
+ ReIsLibInstance = re.compile("^\s*##\s*@LIB_INSTANCES\s*$")
+ if ReIsLibInstance.match(String):
+ return True
+ else:
+ return False
+
+
+## IsAsBuildOptionInfo
+#
+# Determine whether the string contains the ## @AsBuilt marker.
+#
+# @param String
+#
+# @return Flag
+#
+def IsAsBuildOptionInfo(String):
+ ReIsAsBuildInstance = re.compile("^\s*##\s*@AsBuilt\s*$")
+ if ReIsAsBuildInstance.match(String):
+ return True
+ else:
+ return False
+
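Both matchers key on '##' marker comments; a quick sketch (hypothetical
strings, not part of the patch):

    IsLibInstanceInfo('## @LIB_INSTANCES')    # True
    IsAsBuildOptionInfo('  ##  @AsBuilt  ')   # True
    IsLibInstanceInfo('# @LIB_INSTANCES')     # False, needs two '#'
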
+
+class InfParserSectionRoot(object):
+ def __init__(self):
+ #
+        # Macros defined in the [Defines] section are file-scope globals
+ #
+ self.FileLocalMacros = {}
+
+ #
+ # Current Section Header content.
+ #
+ self.SectionHeaderContent = []
+
+ #
+        # Previous section header content.
+ #
+ self.LastSectionHeaderContent = []
+
+ self.FullPath = ''
+
+ self.InfDefSection = None
+ self.InfBuildOptionSection = None
+ self.InfLibraryClassSection = None
+ self.InfPackageSection = None
+ self.InfPcdSection = None
+ self.InfSourcesSection = None
+ self.InfUserExtensionSection = None
+ self.InfProtocolSection = None
+ self.InfPpiSection = None
+ self.InfGuidSection = None
+ self.InfDepexSection = None
+ self.InfPeiDepexSection = None
+ self.InfDxeDepexSection = None
+ self.InfSmmDepexSection = None
+ self.InfBinariesSection = None
+ self.InfHeader = None
+ self.InfSpecialCommentSection = None
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
new file mode 100755
index 00000000..e930398d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
@@ -0,0 +1,178 @@
+## @file
+# This file contains the parser for [Pcds] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfPcdSectionParser
+'''
+##
+# Import Modules
+#
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Library import GlobalData
+from Library.StringUtils import SplitPcdEntry
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfPcdSectionParser(InfParserSectionRoot):
+ ## Section PCD related parser
+ #
+    # All 5 PCD section types below use this function:
+    # 'FixedPcd', 'FeaturePcd', 'PatchPcd', 'Pcd', 'PcdEx'
+    #
+    # This is an INF-independent parser; its validation only covers the
+    # INF spec scope and does not cross-check PCD values against DEC/DSC
+    # files.
+ #
+ def InfPcdParser(self, SectionString, InfSectionObject, FileName):
+ KeysList = []
+ PcdList = []
+ CommentsList = []
+ ValueList = []
+ #
+ # Current section archs
+ #
+ LineIndex = -1
+ for Item in self.LastSectionHeaderContent:
+ if (Item[0], Item[1], Item[3]) not in KeysList:
+ KeysList.append((Item[0], Item[1], Item[3]))
+ LineIndex = Item[3]
+
+ if (Item[0].upper() == DT.TAB_INF_FIXED_PCD.upper() or \
+ Item[0].upper() == DT.TAB_INF_FEATURE_PCD.upper() or \
+ Item[0].upper() == DT.TAB_INF_PCD.upper()) and GlobalData.gIS_BINARY_INF:
+ Logger.Error('InfParser', FORMAT_INVALID, ST.ERR_ASBUILD_PCD_SECTION_TYPE%("\"" + Item[0] + "\""),
+ File=FileName, Line=LineIndex)
+
+ #
+ # For Common INF file
+ #
+ if not GlobalData.gIS_BINARY_INF:
+ #
+ # Macro defined in this section
+ #
+ SectionMacros = {}
+ for Line in SectionString:
+ PcdLineContent = Line[0]
+ PcdLineNo = Line[1]
+ if PcdLineContent.strip() == '':
+ CommentsList = []
+ continue
+
+ if PcdLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ CommentsList.append(Line)
+ continue
+ else:
+ #
+ # Encounter a PCD entry
+ #
+ if PcdLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ CommentsList.append((
+ PcdLineContent[PcdLineContent.find(DT.TAB_COMMENT_SPLIT):],
+ PcdLineNo))
+ PcdLineContent = PcdLineContent[:PcdLineContent.find(DT.TAB_COMMENT_SPLIT)]
+
+ if PcdLineContent != '':
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((PcdLineContent, PcdLineNo),
+ FileName,
+ DT.MODEL_EFI_PCD,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ ValueList = []
+ CommentsList = []
+ continue
+
+ PcdEntryReturn = SplitPcdEntry(PcdLineContent)
+
+ if not PcdEntryReturn[1]:
+ TokenList = ['']
+ else:
+ TokenList = PcdEntryReturn[0]
+
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ ValueList = [InfExpandMacro(Value, (FileName, PcdLineContent, PcdLineNo),
+ self.FileLocalMacros, SectionMacros, True)
+ for Value in ValueList]
+
+ if len(ValueList) >= 1:
+ PcdList.append((ValueList, CommentsList, (PcdLineContent, PcdLineNo, FileName)))
+ ValueList = []
+ CommentsList = []
+ continue
+ #
+ # For Binary INF file
+ #
+ else:
+ for Line in SectionString:
+ LineContent = Line[0].strip()
+ LineNo = Line[1]
+
+ if LineContent == '':
+ CommentsList = []
+ continue
+
+ if LineContent.startswith(DT.TAB_COMMENT_SPLIT):
+ CommentsList.append(LineContent)
+ continue
+ #
+                # Handle tail comments, if any.
+ #
+ CommentIndex = LineContent.find(DT.TAB_COMMENT_SPLIT)
+ if CommentIndex > -1:
+ CommentsList.append(LineContent[CommentIndex+1:])
+ LineContent = LineContent[:CommentIndex]
+
+ TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT)
+ #
+ # PatchablePcd
+ # TokenSpace.CName | Value | Offset
+ #
+ if KeysList[0][0].upper() == DT.TAB_INF_PATCH_PCD.upper():
+ if len(TokenList) != 3:
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_ASBUILD_PATCHPCD_FORMAT_INVALID,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=LineContent)
+ #
+ elif KeysList[0][0].upper() == DT.TAB_INF_PCD_EX.upper():
+ if len(TokenList) != 1:
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_ASBUILD_PCDEX_FORMAT_INVALID,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=LineContent)
+ ValueList[0:len(TokenList)] = TokenList
+ if len(ValueList) >= 1:
+ PcdList.append((ValueList, CommentsList, (LineContent, LineNo, FileName)))
+ ValueList = []
+ CommentsList = []
+ continue
+
+ if not InfSectionObject.SetPcds(PcdList, KeysList = KeysList,
+ PackageInfo = self.InfPackageSection.GetPackages()):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[PCD]"),
+ File=FileName,
+ Line=LineIndex)
+
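
A field-count sketch for the as-built branch (hypothetical PCD names, not
part of the patch): [PatchPcd] entries must split into exactly three fields
and [PcdEx] entries into exactly one:

    GetSplitValueList('gTokenSpaceGuid.PcdFoo | 0x1 | 0x0024', DT.TAB_VALUE_SPLIT)
    # -> ['gTokenSpaceGuid.PcdFoo', '0x1', '0x0024']   valid [PatchPcd]
    GetSplitValueList('gTokenSpaceGuid.PcdBar', DT.TAB_VALUE_SPLIT)
    # -> ['gTokenSpaceGuid.PcdBar']                    valid [PcdEx]
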
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py
new file mode 100755
index 00000000..9a0cd714
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py
@@ -0,0 +1,493 @@
+## @file
+# This file contains the parser for sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+InfSectionParser
+'''
+##
+# Import Modules
+#
+from copy import deepcopy
+import re
+
+from Library.StringUtils import GetSplitValueList
+from Library.CommentParsing import ParseHeaderCommentSection
+from Library.CommentParsing import ParseComment
+
+from Library import DataType as DT
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+
+from Object.Parser.InfDefineObject import InfDefObject
+from Object.Parser.InfBuildOptionObject import InfBuildOptionsObject
+from Object.Parser.InfLibraryClassesObject import InfLibraryClassObject
+from Object.Parser.InfPackagesObject import InfPackageObject
+from Object.Parser.InfPcdObject import InfPcdObject
+from Object.Parser.InfSoucesObject import InfSourcesObject
+from Object.Parser.InfUserExtensionObject import InfUserExtensionObject
+from Object.Parser.InfProtocolObject import InfProtocolObject
+from Object.Parser.InfPpiObject import InfPpiObject
+from Object.Parser.InfGuidObject import InfGuidObject
+from Object.Parser.InfDepexObject import InfDepexObject
+from Object.Parser.InfBinaryObject import InfBinariesObject
+from Object.Parser.InfHeaderObject import InfHeaderObject
+from Object.Parser.InfMisc import InfSpecialCommentObject
+from Object.Parser.InfMisc import InfHobObject
+from Object.Parser.InfMisc import InfBootModeObject
+from Object.Parser.InfMisc import InfEventObject
+from Parser.InfParserMisc import gINF_SECTION_DEF
+from Parser.InfDefineSectionParser import InfDefinSectionParser
+from Parser.InfBuildOptionSectionParser import InfBuildOptionSectionParser
+from Parser.InfSourceSectionParser import InfSourceSectionParser
+from Parser.InfLibrarySectionParser import InfLibrarySectionParser
+from Parser.InfPackageSectionParser import InfPackageSectionParser
+from Parser.InfGuidPpiProtocolSectionParser import InfGuidPpiProtocolSectionParser
+from Parser.InfBinarySectionParser import InfBinarySectionParser
+from Parser.InfPcdSectionParser import InfPcdSectionParser
+from Parser.InfDepexSectionParser import InfDepexSectionParser
+
+## GetSpecialStr2
+#
+# GetSpecialStr2
+#
+def GetSpecialStr2(ItemList, FileName, LineNo, SectionString):
+ Str2 = ''
+ #
+ # S2 may be Platform or ModuleType
+ #
+ if len(ItemList) == 3:
+ #
+        # Only [LibraryClasses], [Depex] and [UserExtensions] section
+        # headers may have more than 2 items; others should report an
+        # error.
+ #
+ if not (ItemList[0].upper() == DT.TAB_LIBRARY_CLASSES.upper() or \
+ ItemList[0].upper() == DT.TAB_DEPEX.upper() or \
+ ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper()):
+ if ItemList[2] != '':
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID % (SectionString),
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+ Str2 = ItemList[2]
+ elif len(ItemList) == 4:
+ #
+        # Only a [UserExtensions] section header may have 4 items;
+        # others should report an error.
+ #
+ if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper() or ItemList[0].upper() == DT.TAB_DEPEX.upper():
+ if ItemList[3] != '':
+ Logger.Error('Parser', FORMAT_INVALID, ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID \
+ % (SectionString), File=FileName, Line=LineNo, ExtraData=SectionString)
+
+ if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
+ Str2 = ItemList[2] + ' | ' + ItemList[3]
+ else:
+ Str2 = ItemList[2]
+
+ elif len(ItemList) > 4:
+ Logger.Error('Parser', FORMAT_INVALID, ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID \
+ % (SectionString), File=FileName, Line=LineNo, ExtraData=SectionString)
+
+ return Str2
+
+## ProcessUseExtHeader
+#
+#
+def ProcessUseExtHeader(ItemList):
+ NewItemList = []
+ AppendContent = ''
+ CompleteFlag = False
+ for Item in ItemList:
+ if Item.startswith('\"') and not Item.endswith('\"'):
+ AppendContent = Item
+ CompleteFlag = True
+ elif Item.endswith('\"') and not Item.startswith('\"'):
+ #
+            # An item ending with '"' must have been opened by an item starting with '"'.
+ #
+ if not CompleteFlag:
+ return False, []
+ AppendContent = AppendContent + "." + Item
+ NewItemList.append(AppendContent)
+ CompleteFlag = False
+ AppendContent = ''
+ elif Item.endswith('\"') and Item.startswith('\"'):
+ #
+            # Common item; no need to combine information
+ #
+ NewItemList.append(Item)
+ else:
+ if not CompleteFlag:
+ NewItemList.append(Item)
+ else:
+ AppendContent = AppendContent + "." + Item
+
+ if len(NewItemList) > 4:
+ return False, []
+
+ return True, NewItemList
+
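A behavior sketch (hypothetical header items, not part of the patch): a
quoted IdString that was previously split on '.' is stitched back together:

    ProcessUseExtHeader(['UserExtensions', 'TianoCore', '"Extra', 'Files"'])
    # -> (True, ['UserExtensions', 'TianoCore', '"Extra.Files"'])
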
+## GetArch
+#
+# GetArch
+#
+def GetArch(ItemList, ArchList, FileName, LineNo, SectionString):
+ #
+ # S1 is always Arch
+ #
+ if len(ItemList) > 1:
+ Arch = ItemList[1]
+ else:
+ Arch = 'COMMON'
+ ArchList.add(Arch)
+
+ #
+    # 'COMMON' must not be used with specific ARCHs in the same section
+ #
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_SECTION_ARCH_CONFLICT,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+
+ return Arch, ArchList
+
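A usage sketch (hypothetical values, not part of the patch): a missing arch
field defaults to 'COMMON', and mixing 'COMMON' with a specific arch in one
header raises the conflict error above:

    ArchSet = set()
    Arch, ArchSet = GetArch(['Sources', 'IA32'], ArchSet,
                            'Mod.inf', 5, '[Sources.IA32]')
    # Arch == 'IA32'; a subsequent item with no arch part would add 'COMMON'
    # and trigger ERR_INF_PARSER_SECTION_ARCH_CONFLICT.
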
+## InfSectionParser
+#
+# Inherit from object
+#
+class InfSectionParser(InfDefinSectionParser,
+ InfBuildOptionSectionParser,
+ InfSourceSectionParser,
+ InfLibrarySectionParser,
+ InfPackageSectionParser,
+ InfGuidPpiProtocolSectionParser,
+ InfBinarySectionParser,
+ InfPcdSectionParser,
+ InfDepexSectionParser):
+ #
+ # Parser objects used to implement singleton
+ #
+ MetaFiles = {}
+
+ ## Factory method
+ #
+ # One file, one parser object. This factory method makes sure that there's
+ # only one object constructed for one meta file.
+ #
+ # @param Class class object of real AutoGen class
+ # (InfParser, DecParser or DscParser)
+ # @param FilePath The path of meta file
+ #
+ def __new__(cls, FilePath, *args, **kwargs):
+ if args:
+ pass
+ if kwargs:
+ pass
+ if FilePath in cls.MetaFiles:
+ return cls.MetaFiles[FilePath]
+ else:
+ ParserObject = super(InfSectionParser, cls).__new__(cls)
+ cls.MetaFiles[FilePath] = ParserObject
+ return ParserObject
+
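A caching sketch (hypothetical path, not part of the patch), shown with the
InfParser subclass whose __init__ accepts a file name: repeated construction
for the same path yields the same object:

    Parser1 = InfParser('Pkg/Mod.inf')
    Parser2 = InfParser('Pkg/Mod.inf')
    Parser1 is Parser2   # True, served from InfSectionParser.MetaFiles
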
+ def __init__(self):
+ InfDefinSectionParser.__init__(self)
+ InfBuildOptionSectionParser.__init__(self)
+ InfSourceSectionParser.__init__(self)
+ InfLibrarySectionParser.__init__(self)
+ InfPackageSectionParser.__init__(self)
+ InfGuidPpiProtocolSectionParser.__init__(self)
+ InfBinarySectionParser.__init__(self)
+ InfPcdSectionParser.__init__(self)
+ InfDepexSectionParser.__init__(self)
+ #
+        # Initialize all objects that an INF file will generate.
+ #
+ self.InfDefSection = InfDefObject()
+ self.InfBuildOptionSection = InfBuildOptionsObject()
+ self.InfLibraryClassSection = InfLibraryClassObject()
+ self.InfPackageSection = InfPackageObject()
+ self.InfPcdSection = InfPcdObject(list(self.MetaFiles.keys())[0])
+ self.InfSourcesSection = InfSourcesObject()
+ self.InfUserExtensionSection = InfUserExtensionObject()
+ self.InfProtocolSection = InfProtocolObject()
+ self.InfPpiSection = InfPpiObject()
+ self.InfGuidSection = InfGuidObject()
+ self.InfDepexSection = InfDepexObject()
+ self.InfPeiDepexSection = InfDepexObject()
+ self.InfDxeDepexSection = InfDepexObject()
+ self.InfSmmDepexSection = InfDepexObject()
+ self.InfBinariesSection = InfBinariesObject()
+ self.InfHeader = InfHeaderObject()
+ self.InfBinaryHeader = InfHeaderObject()
+ self.InfSpecialCommentSection = InfSpecialCommentObject()
+
+ #
+        # A list to store define section content.
+ #
+ self._PcdNameList = []
+ self._SectionName = ''
+ self._SectionType = 0
+ self.RelaPath = ''
+ self.FileName = ''
+
+ #
+ # File Header content parser
+ #
+ def InfHeaderParser(self, Content, InfHeaderObject2, FileName, IsBinaryHeader = False):
+ if IsBinaryHeader:
+ (Abstract, Description, Copyright, License) = ParseHeaderCommentSection(Content, FileName, True)
+ if not Abstract or not Description or not Copyright or not License:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INVALID_BINARYHEADER_FORMAT,
+ File=FileName)
+ else:
+ (Abstract, Description, Copyright, License) = ParseHeaderCommentSection(Content, FileName)
+ #
+        # The file name is not processed now; it is kept for later use.
+ #
+ if self.FileName:
+ pass
+
+ #
+ # Insert Abstract, Description, CopyRight, License into header object
+ #
+ InfHeaderObject2.SetAbstract(Abstract)
+ InfHeaderObject2.SetDescription(Description)
+ InfHeaderObject2.SetCopyright(Copyright)
+ InfHeaderObject2.SetLicense(License)
+
+ ## Section header parser
+ #
+    # The section header is always in the following format:
+    #
+    # [section_name.arch<.platform|module_type>]
+    #
+    # @param String A string containing the content to be parsed.
+ #
+ def SectionHeaderParser(self, SectionString, FileName, LineNo):
+ _Scope = []
+ _SectionName = ''
+ ArchList = set()
+ _ValueList = []
+ _PcdNameList = [DT.TAB_INF_FIXED_PCD.upper(),
+ DT.TAB_INF_FEATURE_PCD.upper(),
+ DT.TAB_INF_PATCH_PCD.upper(),
+ DT.TAB_INF_PCD.upper(),
+ DT.TAB_INF_PCD_EX.upper()
+ ]
+ SectionString = SectionString.strip()
+ for Item in GetSplitValueList(SectionString[1:-1], DT.TAB_COMMA_SPLIT):
+ if Item == '':
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % (""),
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+ ItemList = GetSplitValueList(Item, DT.TAB_SPLIT)
+ #
+            # Different section names must not be mixed in one section
+            # header; only different PCD-type sections may be mixed.
+ #
+ if _SectionName.upper() not in _PcdNameList:
+ if _SectionName != '' and _SectionName.upper() != ItemList[0].upper():
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_SECTION_NAME_DUPLICATE,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+ elif _PcdNameList[1] in [_SectionName.upper(), ItemList[0].upper()] and \
+ (_SectionName.upper()!= ItemList[0].upper()):
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % (""),
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+
+ _SectionName = ItemList[0]
+ if _SectionName.upper() in gINF_SECTION_DEF:
+ self._SectionType = gINF_SECTION_DEF[_SectionName.upper()]
+ else:
+ self._SectionType = DT.MODEL_UNKNOWN
+ Logger.Error("Parser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_UNKNOWN_SECTION,
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+
+ #
+ # Get Arch
+ #
+ Str1, ArchList = GetArch(ItemList, ArchList, FileName, LineNo, SectionString)
+
+ #
+ # For [Defines] section, do special check.
+ #
+ if ItemList[0].upper() == DT.TAB_COMMON_DEFINES.upper():
+ if len(ItemList) != 1:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (SectionString),
+ File=FileName, Line=LineNo, ExtraData=SectionString)
+
+ #
+ # For [UserExtension] section, do special check.
+ #
+ if ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
+
+ RetValue = ProcessUseExtHeader(ItemList)
+
+ if not RetValue[0]:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (SectionString),
+ File=FileName, Line=LineNo, ExtraData=SectionString)
+ else:
+ ItemList = RetValue[1]
+
+ if len(ItemList) == 3:
+ ItemList.append('COMMON')
+
+ Str1 = ItemList[1]
+
+ #
+ # For Library classes, need to check module type.
+ #
+ if ItemList[0].upper() == DT.TAB_LIBRARY_CLASSES.upper() and len(ItemList) == 3:
+ if ItemList[2] != '':
+ ModuleTypeList = GetSplitValueList(ItemList[2], DT.TAB_VALUE_SPLIT)
+ for Item in ModuleTypeList:
+ if Item.strip() not in DT.MODULE_LIST:
+ Logger.Error('Parser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID % (Item),
+ File=FileName,
+ Line=LineNo,
+ ExtraData=SectionString)
+ #
+ # GetSpecialStr2
+ #
+ Str2 = GetSpecialStr2(ItemList, FileName, LineNo, SectionString)
+
+ _Scope.append([Str1, Str2])
+
+ _NewValueList = []
+ _AppendFlag = True
+ if _SectionName.upper() in _PcdNameList:
+ for ValueItem in _ValueList:
+ if _SectionName.upper() == ValueItem[0].upper() and Str1.upper() not in ValueItem[1].split():
+ ValueItem[1] = ValueItem[1] + " " + Str1
+ _AppendFlag = False
+ elif _SectionName.upper() == ValueItem[0].upper() and Str1.upper() in ValueItem[1].split():
+ _AppendFlag = False
+
+ _NewValueList.append(ValueItem)
+
+ _ValueList = _NewValueList
+
+ if _AppendFlag:
+ if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
+ _ValueList.append([_SectionName, Str1, Str2, LineNo])
+ else:
+ if len(ItemList) == 4:
+ _ValueList.append([_SectionName, Str1, Str2, ItemList[3], LineNo])
+
+ self.SectionHeaderContent = deepcopy(_ValueList)
+
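A result sketch (hypothetical header, not part of the patch): every
comma-separated item in a multi-arch header contributes one entry:

    Parser.SectionHeaderParser('[Sources.IA32, Sources.X64]', 'Mod.inf', 12)
    # Parser.SectionHeaderContent ->
    #   [['Sources', 'IA32', '', 12], ['Sources', 'X64', '', 12]]
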
+ ## GenSpecialSectionList
+ #
+ # @param SpecialSectionList: a list of list, of which item's format
+ # (Comment, LineNum)
+ # @param ContainerFile: Input value for filename of Inf file
+ #
+ def InfSpecialCommentParser (self, SpecialSectionList, InfSectionObject, ContainerFile, SectionType):
+ ReFindSpecialCommentRe = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
+ ReFindHobArchRe = re.compile(r"""[Hh][Oo][Bb]\.([^,]*)""", re.DOTALL)
+ if self.FileName:
+ pass
+ SpecialObjectList = []
+ ArchList = []
+ if SectionType == DT.TYPE_EVENT_SECTION:
+ TokenDict = DT.EVENT_TOKENS
+ elif SectionType == DT.TYPE_HOB_SECTION:
+ TokenDict = DT.HOB_TOKENS
+ else:
+ TokenDict = DT.BOOTMODE_TOKENS
+
+ for List in SpecialSectionList:
+ #
+        # Hob has an Arch attribute and needs special handling here
+ #
+ if SectionType == DT.TYPE_HOB_SECTION:
+
+ MatchObject = ReFindSpecialCommentRe.search(List[0][0])
+ HobSectionStr = MatchObject.group(1)
+ ArchList = []
+ for Match in ReFindHobArchRe.finditer(HobSectionStr):
+ Arch = Match.groups(1)[0].upper()
+ ArchList.append(Arch)
+ CommentSoFar = ''
+ for Index in range(1, len(List)):
+ Result = ParseComment(List[Index], DT.ALL_USAGE_TOKENS, TokenDict, [], False)
+ Usage = Result[0]
+ Type = Result[1]
+ HelpText = Result[3]
+
+ if Usage == DT.ITEM_UNDEFINED and Type == DT.ITEM_UNDEFINED:
+ if HelpText is None:
+ HelpText = ''
+ if not HelpText.endswith('\n'):
+ HelpText += '\n'
+ CommentSoFar += HelpText
+ else:
+ if HelpText:
+ CommentSoFar += HelpText
+ if SectionType == DT.TYPE_EVENT_SECTION:
+ SpecialObject = InfEventObject()
+ SpecialObject.SetEventType(Type)
+ SpecialObject.SetUsage(Usage)
+ SpecialObject.SetHelpString(CommentSoFar)
+ elif SectionType == DT.TYPE_HOB_SECTION:
+ SpecialObject = InfHobObject()
+ SpecialObject.SetHobType(Type)
+ SpecialObject.SetUsage(Usage)
+ SpecialObject.SetHelpString(CommentSoFar)
+ if len(ArchList) >= 1:
+ SpecialObject.SetSupArchList(ArchList)
+ else:
+ SpecialObject = InfBootModeObject()
+ SpecialObject.SetSupportedBootModes(Type)
+ SpecialObject.SetUsage(Usage)
+ SpecialObject.SetHelpString(CommentSoFar)
+
+ SpecialObjectList.append(SpecialObject)
+ CommentSoFar = ''
+ if not InfSectionObject.SetSpecialComments(SpecialObjectList,
+ SectionType):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % (SectionType),
+ ContainerFile
+ )
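
An input-shape sketch (hypothetical content, not part of the patch): each
item in SpecialSectionList is a list of (CommentLine, LineNum) pairs whose
first pair carries the '[Hob...]', '[Event...]' or '[BootMode...]' marker:

    HobBlock = [['# [Hob.IA32]', 30],
                ['## PRODUCES   # hypothetical usage comment', 31]]
    # ReFindHobArchRe pulls 'IA32' out of the marker, so the resulting
    # InfHobObject gets SupArchList == ['IA32'].
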
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
new file mode 100755
index 00000000..ac61177a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
@@ -0,0 +1,139 @@
+## @file
+# This file contains the parser for [Sources] sections in INF files
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfSourceSectionParser
+'''
+##
+# Import Modules
+#
+
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import FORMAT_INVALID
+from Parser.InfParserMisc import InfExpandMacro
+from Library import DataType as DT
+from Library.Parsing import MacroParser
+from Library.Misc import GetSplitValueList
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Parser.InfParserMisc import InfParserSectionRoot
+
+class InfSourceSectionParser(InfParserSectionRoot):
+ ## InfSourceParser
+ #
+ #
+ def InfSourceParser(self, SectionString, InfSectionObject, FileName):
+ SectionMacros = {}
+ ValueList = []
+ SourceList = []
+ StillCommentFalg = False
+ HeaderComments = []
+ LineComment = None
+ SectionContent = ''
+ for Line in SectionString:
+ SrcLineContent = Line[0]
+ SrcLineNo = Line[1]
+
+ if SrcLineContent.strip() == '':
+ continue
+
+ #
+ # Found Header Comments
+ #
+ if SrcLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
+ #
+                # The previous line was a comment and this line continues it.
+ #
+ if StillCommentFalg:
+ HeaderComments.append(Line)
+ SectionContent += SrcLineContent + DT.END_OF_LINE
+ continue
+ #
+                # First comment line encountered
+ #
+ else:
+ #
+ # Clear original data
+ #
+ HeaderComments = []
+ HeaderComments.append(Line)
+ StillCommentFalg = True
+ SectionContent += SrcLineContent + DT.END_OF_LINE
+ continue
+ else:
+ StillCommentFalg = False
+
+ if len(HeaderComments) >= 1:
+ LineComment = InfLineCommentObject()
+ LineCommentContent = ''
+ for Item in HeaderComments:
+ LineCommentContent += Item[0] + DT.END_OF_LINE
+ LineComment.SetHeaderComments(LineCommentContent)
+
+ #
+ # Find Tail comment.
+ #
+ if SrcLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
+ TailComments = SrcLineContent[SrcLineContent.find(DT.TAB_COMMENT_SPLIT):]
+ SrcLineContent = SrcLineContent[:SrcLineContent.find(DT.TAB_COMMENT_SPLIT)]
+ if LineComment is None:
+ LineComment = InfLineCommentObject()
+ LineComment.SetTailComments(TailComments)
+
+ #
+ # Find Macro
+ #
+ Name, Value = MacroParser((SrcLineContent, SrcLineNo),
+ FileName,
+ DT.MODEL_EFI_SOURCE_FILE,
+ self.FileLocalMacros)
+ if Name is not None:
+ SectionMacros[Name] = Value
+ LineComment = None
+ HeaderComments = []
+ continue
+
+ #
+ # Replace with Local section Macro and [Defines] section Macro.
+ #
+ SrcLineContent = InfExpandMacro(SrcLineContent,
+ (FileName, SrcLineContent, SrcLineNo),
+ self.FileLocalMacros,
+ SectionMacros)
+
+ TokenList = GetSplitValueList(SrcLineContent, DT.TAB_VALUE_SPLIT, 4)
+ ValueList[0:len(TokenList)] = TokenList
+
+ #
+ # Store section content string after MACRO replaced.
+ #
+ SectionContent += SrcLineContent + DT.END_OF_LINE
+
+ SourceList.append((ValueList, LineComment,
+ (SrcLineContent, SrcLineNo, FileName)))
+ ValueList = []
+ LineComment = None
+ TailComments = ''
+ HeaderComments = []
+ continue
+
+ #
+ # Current section archs
+ #
+ ArchList = []
+ for Item in self.LastSectionHeaderContent:
+ if Item[1] not in ArchList:
+ ArchList.append(Item[1])
+ InfSectionObject.SetSupArchList(Item[1])
+
+ InfSectionObject.SetAllContent(SectionContent)
+ if not InfSectionObject.SetSources(SourceList, Arch = ArchList):
+ Logger.Error('InfParser',
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Sources]"),
+ File=FileName,
+ Line=Item[3])
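
A token sketch (hypothetical entry, not part of the patch): a [Sources] line
may carry up to four extra attributes after the path, split on '|' with
MaxSplit=4:

    GetSplitValueList('Foo.asm | MSFT | | | SOME_FLAG', DT.TAB_VALUE_SPLIT, 4)
    # -> ['Foo.asm', 'MSFT', '', '', 'SOME_FLAG']
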
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/__init__.py
new file mode 100644
index 00000000..d6922c14
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Parser/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Parser' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Parser
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
new file mode 100755
index 00000000..354e493d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py
@@ -0,0 +1,1000 @@
+## @file DecPomAlignment.py
+# This file contains the adapter that converts a DEC parser object to a POM object
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+DecPomAlignment
+'''
+from __future__ import print_function
+
+##
+# Import Modules
+#
+import os.path
+from os import sep
+import platform
+
+import re
+import Logger.Log as Logger
+from Logger import StringTable as ST
+from Logger.ToolError import UPT_MUL_DEC_ERROR
+from Logger.ToolError import FORMAT_INVALID
+
+from Library.Parsing import NormPath
+from Library.DataType import ARCH_LIST
+from Library.DataType import TAB_GUIDS
+from Library.DataType import TAB_PROTOCOLS
+from Library.DataType import TAB_PPIS
+from Library.DataType import TAB_DEC_DEFINES_PACKAGE_NAME
+from Library.DataType import TAB_DEC_DEFINES_PACKAGE_GUID
+from Library.DataType import TAB_DEC_DEFINES_PACKAGE_VERSION
+from Library.DataType import TAB_DEC_DEFINES_DEC_SPECIFICATION
+from Library.DataType import TAB_DEC_DEFINES_PKG_UNI_FILE
+from Library.DataType import TAB_ARCH_COMMON
+from Library.DataType import TAB_INCLUDES
+from Library.DataType import TAB_LIBRARY_CLASSES
+from Library.DataType import TAB_PCDS
+from Library.DataType import TAB_PCDS_FIXED_AT_BUILD_NULL
+from Library.DataType import TAB_PCDS_PATCHABLE_IN_MODULE_NULL
+from Library.DataType import TAB_PCDS_FEATURE_FLAG_NULL
+from Library.DataType import TAB_PCDS_DYNAMIC_EX_NULL
+from Library.DataType import TAB_PCDS_DYNAMIC_NULL
+from Library.DataType import TAB_PTR_TYPE_PCD
+from Library.DataType import ITEM_UNDEFINED
+from Library.DataType import TAB_DEC_BINARY_ABSTRACT
+from Library.DataType import TAB_DEC_BINARY_DESCRIPTION
+from Library.DataType import TAB_LANGUAGE_EN_US
+from Library.DataType import TAB_BINARY_HEADER_IDENTIFIER
+from Library.DataType import TAB_BINARY_HEADER_USERID
+from Library.DataType import TAB_LANGUAGE_EN_X
+from Library.DataType import TAB_LANGUAGE_EN
+from Library.DataType import TAB_STR_TOKENCNAME
+from Library.DataType import TAB_STR_TOKENPROMPT
+from Library.DataType import TAB_STR_TOKENHELP
+from Library.DataType import TAB_STR_TOKENERR
+from Library.DataType import TAB_HEX_START
+from Library.DataType import TAB_SPLIT
+import Library.DataType as DT
+from Library.CommentParsing import ParseHeaderCommentSection
+from Library.CommentParsing import ParseGenericComment
+from Library.CommentParsing import ParseDecPcdGenericComment
+from Library.CommentParsing import ParseDecPcdTailComment
+from Library.Misc import GetFiles
+from Library.Misc import Sdict
+from Library.Misc import GetRelativePath
+from Library.Misc import PathClass
+from Library.Misc import ValidateUNIFilePath
+from Library.UniClassObject import UniFileClassObject
+from Library.UniClassObject import ConvertSpecialUnicodes
+from Library.UniClassObject import GetLanguageCode1766
+from Library.ParserValidate import IsValidPath
+from Parser.DecParser import Dec
+from Object.POM.PackageObject import PackageObject
+from Object.POM.CommonObject import UserExtensionObject
+from Object.POM.CommonObject import IncludeObject
+from Object.POM.CommonObject import GuidObject
+from Object.POM.CommonObject import ProtocolObject
+from Object.POM.CommonObject import PpiObject
+from Object.POM.CommonObject import LibraryClassObject
+from Object.POM.CommonObject import PcdObject
+from Object.POM.CommonObject import TextObject
+from Object.POM.CommonObject import MiscFileObject
+from Object.POM.CommonObject import FileObject
+
+
+## DecPomAlignment
+#
+# Inherited from PackageObject
+#
+class DecPomAlignment(PackageObject):
+ def __init__(self, Filename, WorkspaceDir = None, CheckMulDec = False):
+ PackageObject.__init__(self)
+ self.UserExtensions = ''
+ self.WorkspaceDir = WorkspaceDir
+ self.SupArchList = ARCH_LIST
+ self.CheckMulDec = CheckMulDec
+ self.DecParser = None
+ self.UniFileClassObject = None
+ self.PcdDefaultValueDict = {}
+
+ #
+ # Load Dec file
+ #
+ self.LoadDecFile(Filename)
+
+ #
+        # Transfer the parsed contents to a Package Object
+ #
+ self.DecToPackage()
+
+ ## Load Dec file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Dec file
+ #
+ def LoadDecFile(self, Filename):
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(Filename)
+ (Path, Name) = os.path.split(Filename)
+ self.SetFullPath(Filename)
+ self.SetRelaPath(Path)
+ self.SetFileName(Name)
+ self.SetPackagePath(GetRelativePath(Path, self.WorkspaceDir))
+ self.SetCombinePath(GetRelativePath(Filename, self.WorkspaceDir))
+
+ self.DecParser = Dec(Filename)
+
+ ## Transfer to Package Object
+ #
+ # Transfer all contents of a Dec file to a standard Package Object
+ #
+ def DecToPackage(self):
+ #
+ # Init global information for the file
+ #
+ ContainerFile = self.GetFullPath()
+
+ #
+ # Generate Package Header
+ #
+ self.GenPackageHeader(ContainerFile)
+
+ #
+ # Generate Includes
+ #
+ self.GenIncludes(ContainerFile)
+
+ #
+ # Generate Guids
+ #
+ self.GenGuidProtocolPpis(TAB_GUIDS, ContainerFile)
+
+ #
+ # Generate Protocols
+ #
+ self.GenGuidProtocolPpis(TAB_PROTOCOLS, ContainerFile)
+
+ #
+ # Generate Ppis
+ #
+ self.GenGuidProtocolPpis(TAB_PPIS, ContainerFile)
+
+ #
+ # Generate LibraryClasses
+ #
+ self.GenLibraryClasses(ContainerFile)
+
+ #
+ # Generate Pcds
+ #
+ self.GenPcds(ContainerFile)
+
+ #
+        # Generate the Module File list; it will be used later to generate
+        # the distribution
+ #
+ self.GenModuleFileList(ContainerFile)
+
+ #
+ # Generate user extensions
+ #
+ self.GenUserExtensions()
+
+ ## Generate user extension
+ #
+ #
+ def GenUserExtensions(self):
+ UEObj = self.DecParser.GetUserExtensionSectionObject()
+ UEList = UEObj.GetAllUserExtensions()
+ for Item in UEList:
+ if not Item.UserString:
+ continue
+ UserExtension = UserExtensionObject()
+ UserId = Item.UserId
+ if UserId.startswith('"') and UserId.endswith('"'):
+ UserId = UserId[1:-1]
+ UserExtension.SetUserID(UserId)
+ Identifier = Item.IdString
+ if Identifier.startswith('"') and Identifier.endswith('"'):
+ Identifier = Identifier[1:-1]
+ #
+ # Generate miscellaneous files of DEC file
+ #
+ if UserId == 'TianoCore' and Identifier == 'ExtraFiles':
+ self.GenMiscFiles(Item.UserString)
+ UserExtension.SetIdentifier(Identifier)
+ UserExtension.SetStatement(Item.UserString)
+ UserExtension.SetSupArchList(
+ Item.ArchAndModuleType
+ )
+ self.SetUserExtensionList(
+ self.GetUserExtensionList() + [UserExtension]
+ )
+
+ # Add Private sections to UserExtension
+ if self.DecParser.GetPrivateSections():
+ PrivateUserExtension = UserExtensionObject()
+ PrivateUserExtension.SetStatement(self.DecParser.GetPrivateSections())
+ PrivateUserExtension.SetIdentifier(DT.TAB_PRIVATE)
+ PrivateUserExtension.SetUserID(DT.TAB_INTEL)
+ self.SetUserExtensionList(self.GetUserExtensionList() + [PrivateUserExtension])
+
+ ## Generate miscellaneous files on DEC file
+ #
+ #
+ def GenMiscFiles(self, Content):
+ MiscFileObj = MiscFileObject()
+ for Line in Content.splitlines():
+ FileName = ''
+ if '#' in Line:
+ FileName = Line[:Line.find('#')]
+ else:
+ FileName = Line
+ if FileName:
+ if IsValidPath(FileName, self.GetRelaPath()):
+ FileObj = FileObject()
+ FileObj.SetURI(FileName)
+ MiscFileObj.SetFileList(MiscFileObj.GetFileList()+[FileObj])
+ else:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Line),
+ File=self.GetFileName(),
+ ExtraData=Line)
+ self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
+
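A content sketch (hypothetical statement text, not part of the patch): each
line of the TianoCore 'ExtraFiles' statement names one package-relative
file, with anything after '#' dropped before validation:

    Content = 'Readme.txt\nLicense.txt\n'
    # -> one FileObject per line, each checked with IsValidPath() against
    #    the package-relative directory.
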
+ ## Generate Package Header
+ #
+ # Gen Package Header of Dec as <Key> = <Value>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenPackageHeader(self, ContainerFile):
+ Logger.Debug(2, "Generate PackageHeader ...")
+ DefinesDict = {}
+
+ #
+ # Update all defines item in database
+ #
+ DefObj = self.DecParser.GetDefineSectionObject()
+ for Item in DefObj.GetDefines():
+ #
+ # put items into Dict except for PackageName, Guid, Version, DEC_SPECIFICATION
+ #
+ SkipItemList = [TAB_DEC_DEFINES_PACKAGE_NAME, \
+ TAB_DEC_DEFINES_PACKAGE_GUID, TAB_DEC_DEFINES_PACKAGE_VERSION, \
+ TAB_DEC_DEFINES_DEC_SPECIFICATION, TAB_DEC_DEFINES_PKG_UNI_FILE]
+ if Item.Key in SkipItemList:
+ continue
+ DefinesDict['%s = %s' % (Item.Key, Item.Value)] = TAB_ARCH_COMMON
+
+ self.SetBaseName(DefObj.GetPackageName())
+ self.SetVersion(DefObj.GetPackageVersion())
+# self.SetName(DefObj.GetPackageName() + ' Version ' + \
+# DefObj.GetPackageVersion())
+ self.SetName(os.path.splitext(self.GetFileName())[0])
+ self.SetGuid(DefObj.GetPackageGuid())
+ if DefObj.GetPackageUniFile():
+ ValidateUNIFilePath(DefObj.GetPackageUniFile())
+ self.UniFileClassObject = \
+ UniFileClassObject([PathClass(os.path.join(DefObj.GetPackagePath(), DefObj.GetPackageUniFile()))])
+ else:
+ self.UniFileClassObject = None
+
+ if DefinesDict:
+ UserExtension = UserExtensionObject()
+ UserExtension.SetDefinesDict(DefinesDict)
+ UserExtension.SetIdentifier('DefineModifiers')
+ UserExtension.SetUserID('EDK2')
+ self.SetUserExtensionList(
+ self.GetUserExtensionList() + [UserExtension]
+ )
+
+ #
+ # Get File header information
+ #
+ if self.UniFileClassObject:
+ Lang = TAB_LANGUAGE_EN_X
+ else:
+ Lang = TAB_LANGUAGE_EN_US
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(self.DecParser.GetHeadComment(),
+ ContainerFile)
+ if Abstract:
+ self.SetAbstract((Lang, Abstract))
+ if Description:
+ self.SetDescription((Lang, Description))
+ if Copyright:
+ self.SetCopyright(('', Copyright))
+ if License:
+ self.SetLicense(('', License))
+
+ #
+ # Get Binary header information
+ #
+ if self.DecParser.BinaryHeadComment:
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(self.DecParser.BinaryHeadComment,
+ ContainerFile, True)
+
+ if not Abstract or not Description or not Copyright or not License:
+ Logger.Error('MkPkg',
+ FORMAT_INVALID,
+ ST.ERR_INVALID_BINARYHEADER_FORMAT,
+ ContainerFile)
+ else:
+ self.SetBinaryHeaderAbstract((Lang, Abstract))
+ self.SetBinaryHeaderDescription((Lang, Description))
+ self.SetBinaryHeaderCopyright(('', Copyright))
+ self.SetBinaryHeaderLicense(('', License))
+
+ BinaryAbstractList = []
+ BinaryDescriptionList = []
+
+        # Get Binary header from UNI file
+ # Initialize the UniStrDict dictionary, top keys are language codes
+ UniStrDict = {}
+ if self.UniFileClassObject:
+ UniStrDict = self.UniFileClassObject.OrderedStringList
+ for Lang in UniStrDict:
+ for StringDefClassObject in UniStrDict[Lang]:
+ Lang = GetLanguageCode1766(Lang)
+ if StringDefClassObject.StringName == TAB_DEC_BINARY_ABSTRACT:
+ if (Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)) \
+ not in self.GetBinaryHeaderAbstract():
+ BinaryAbstractList.append((Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+ if StringDefClassObject.StringName == TAB_DEC_BINARY_DESCRIPTION:
+ if (Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)) \
+ not in self.GetBinaryHeaderDescription():
+ BinaryDescriptionList.append((Lang,
+ ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+        # Combine Binary header from DEC file and UNI file
+ BinaryAbstractList = self.GetBinaryHeaderAbstract() + BinaryAbstractList
+ BinaryDescriptionList = self.GetBinaryHeaderDescription() + BinaryDescriptionList
+ BinaryCopyrightList = self.GetBinaryHeaderCopyright()
+ BinaryLicenseList = self.GetBinaryHeaderLicense()
+        # Generate the UserExtensionObject for TianoCore."BinaryHeader"
+ if BinaryAbstractList or BinaryDescriptionList or BinaryCopyrightList or BinaryLicenseList:
+ BinaryUserExtension = UserExtensionObject()
+ BinaryUserExtension.SetBinaryAbstract(BinaryAbstractList)
+ BinaryUserExtension.SetBinaryDescription(BinaryDescriptionList)
+ BinaryUserExtension.SetBinaryCopyright(BinaryCopyrightList)
+ BinaryUserExtension.SetBinaryLicense(BinaryLicenseList)
+ BinaryUserExtension.SetIdentifier(TAB_BINARY_HEADER_IDENTIFIER)
+ BinaryUserExtension.SetUserID(TAB_BINARY_HEADER_USERID)
+ self.SetUserExtensionList(self.GetUserExtensionList() + [BinaryUserExtension])
+
+
+ ## GenIncludes
+ #
+ # Gen Includes of Dec
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenIncludes(self, ContainerFile):
+ if ContainerFile:
+ pass
+ Logger.Debug(2, "Generate %s ..." % TAB_INCLUDES)
+ IncludesDict = Sdict()
+
+ IncObj = self.DecParser.GetIncludeSectionObject()
+ for Item in IncObj.GetAllIncludes():
+ IncludePath = os.path.normpath(Item.File)
+ if platform.system() != 'Windows' and platform.system() != 'Microsoft':
+ IncludePath = IncludePath.replace('\\', '/')
+ if IncludePath in IncludesDict:
+ if Item.GetArchList() == [TAB_ARCH_COMMON] or IncludesDict[IncludePath] == [TAB_ARCH_COMMON]:
+ IncludesDict[IncludePath] = [TAB_ARCH_COMMON]
+ else:
+ IncludesDict[IncludePath] = IncludesDict[IncludePath] + Item.GetArchList()
+ else:
+ IncludesDict[IncludePath] = Item.GetArchList()
+
+ #
+        # get the StandardIncludeFileList (industry-standard headers) and the
+        # PackageIncludeFileList (all other headers) for the PackageObject
+ #
+ PackagePath = os.path.split(self.GetFullPath())[0]
+ IncludePathList = \
+ sorted([os.path.normpath(Path) + sep for Path in IncludesDict.keys()])
+
+ #
+        # get a non-overlapping set of include paths; IncludePathList must be
+        # sorted and each path must end with the path separator '\'
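+        #
+        # Illustrative example (hypothetical paths): for the sorted input
+        # ['Include/', 'Include/Protocol/', 'Library/'] the loop below keeps
+        # 'Include/' and 'Library/' and drops 'Include/Protocol/', because
+        # the latter starts with the already-kept 'Include/'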
+ #
+ NonOverLapList = []
+ for Path1 in IncludePathList:
+ for Path2 in NonOverLapList:
+ if Path1.startswith(Path2):
+ break
+ else:
+ NonOverLapList.append(Path1)
+ #
+        # reverse the list so that the longest path appears first, and remove
+        # the extra path separator '\', as this list is used to look up the
+        # supported Arch info
+ #
+ for IndexN in range (0, len(IncludePathList)):
+ IncludePathList[IndexN] = os.path.normpath(IncludePathList[IndexN])
+ IncludePathList.sort()
+ IncludePathList.reverse()
+ #
+ # save the include path list for later usage
+ #
+ self.SetIncludePathList(IncludePathList)
+ StandardIncludeFileList = []
+ PackageIncludeFileList = []
+
+ IncludeFileList = []
+ for Path in NonOverLapList:
+ FileList = GetFiles(os.path.join(PackagePath, Path), ['CVS', '.svn'], False)
+ IncludeFileList += [os.path.normpath(os.path.join(Path, File)) for File in FileList]
+ for Includefile in IncludeFileList:
+ ExtName = os.path.splitext(Includefile)[1]
+ if ExtName.upper() == '.DEC' and self.CheckMulDec:
+ Logger.Error('MkPkg',
+ UPT_MUL_DEC_ERROR,
+ ST.ERR_MUL_DEC_ERROR%(os.path.dirname(ContainerFile),
+ os.path.basename(ContainerFile),
+ Includefile))
+
+ FileCombinePath = os.path.dirname(Includefile)
+ Include = IncludeObject()
+ for Path in IncludePathList:
+ if FileCombinePath.startswith(Path):
+ SupArchList = IncludesDict[Path]
+ break
+ Include.SetFilePath(Includefile)
+ Include.SetSupArchList(SupArchList)
+ if Includefile.find('IndustryStandard') != -1:
+ StandardIncludeFileList.append(Include)
+ else:
+ PackageIncludeFileList.append(Include)
+
+ self.SetStandardIncludeFileList(StandardIncludeFileList)
+
+ #
+ # put include path into the PackageIncludeFileList
+ #
+ PackagePathList = []
+ IncObj = self.DecParser.GetIncludeSectionObject()
+ for Item in IncObj.GetAllIncludes():
+ IncludePath = Item.File
+ Include = IncludeObject()
+ Include.SetFilePath(IncludePath)
+ Include.SetSupArchList(Item.GetArchList())
+ PackagePathList.append(Include)
+ self.SetPackageIncludeFileList(PackagePathList + PackageIncludeFileList)
+
+    ## GenGuidProtocolPpis
+    #
+    # Gen Guids/Protocols/Ppis of Dec
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
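+    # Illustrative DEC entry in GUID style (hypothetical GUID):
+    #   gExamplePkgTokenSpaceGuid = { 0x11111111, 0x2222, 0x3333, { 0x44, 0x44, 0x55, 0x55, 0x66, 0x66, 0x77, 0x77 }}
+    #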
+ def GenGuidProtocolPpis(self, Type, ContainerFile):
+ if ContainerFile:
+ pass
+ Logger.Debug(2, "Generate %s ..." % Type)
+
+ Obj = None
+ Factory = None
+ if Type == TAB_GUIDS:
+ Obj = self.DecParser.GetGuidSectionObject()
+ def CreateGuidObject():
+ Object = GuidObject()
+ Object.SetGuidTypeList([])
+ Object.SetUsage(None)
+ Object.SetName(None)
+ return Object
+ Factory = CreateGuidObject
+ elif Type == TAB_PROTOCOLS:
+ Obj = self.DecParser.GetProtocolSectionObject()
+
+ def CreateProtocolObject():
+ return ProtocolObject()
+ Factory = CreateProtocolObject
+ elif Type == TAB_PPIS:
+ Obj = self.DecParser.GetPpiSectionObject()
+
+ def CreatePpiObject():
+ return PpiObject()
+ Factory = CreatePpiObject
+ else:
+ #
+ # Should not be here
+ #
+ return
+
+ DeclarationsList = []
+
+ #
+ # Go through each arch
+ #
+ for Item in Obj.GetGuidStyleAllItems():
+ Name = Item.GuidCName
+ Value = Item.GuidString
+ HelpTxt = ParseGenericComment(Item.GetHeadComment() + \
+ Item.GetTailComment())
+
+ ListObject = Factory()
+ ListObject.SetCName(Name)
+ ListObject.SetGuid(Value)
+ ListObject.SetSupArchList(Item.GetArchList())
+ if HelpTxt:
+ if self.UniFileClassObject:
+ HelpTxt.SetLang(TAB_LANGUAGE_EN_X)
+ ListObject.SetHelpTextList([HelpTxt])
+
+ DeclarationsList.append(ListObject)
+
+ #
+        # GuidTypeList is abstracted from the help text
+ #
+ if Type == TAB_GUIDS:
+ self.SetGuidList(self.GetGuidList() + DeclarationsList)
+ elif Type == TAB_PROTOCOLS:
+ self.SetProtocolList(self.GetProtocolList() + DeclarationsList)
+ elif Type == TAB_PPIS:
+ self.SetPpiList(self.GetPpiList() + DeclarationsList)
+
+ ## GenLibraryClasses
+ #
+ # Gen LibraryClasses of Dec
+    # <LibraryClassName>|<IncludeHeader>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
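+    # Illustrative DEC [LibraryClasses] entry (hypothetical library class):
+    #   ExampleLib|Include/Library/ExampleLib.h
+    #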
+ def GenLibraryClasses(self, ContainerFile):
+ if ContainerFile:
+ pass
+ Logger.Debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+ LibraryClassDeclarations = []
+
+ LibObj = self.DecParser.GetLibraryClassSectionObject()
+ for Item in LibObj.GetAllLibraryclasses():
+ LibraryClass = LibraryClassObject()
+ LibraryClass.SetLibraryClass(Item.Libraryclass)
+ LibraryClass.SetSupArchList(Item.GetArchList())
+ LibraryClass.SetIncludeHeader(Item.File)
+ HelpTxt = ParseGenericComment(Item.GetHeadComment() + \
+ Item.GetTailComment(), None, '@libraryclass')
+ if HelpTxt:
+ if self.UniFileClassObject:
+ HelpTxt.SetLang(TAB_LANGUAGE_EN_X)
+ LibraryClass.SetHelpTextList([HelpTxt])
+ LibraryClassDeclarations.append(LibraryClass)
+
+ self.SetLibraryClassList(self.GetLibraryClassList() + \
+ LibraryClassDeclarations)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Dec
+ # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
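+    # Illustrative DEC PCD entry (hypothetical token space and token number):
+    #   gExamplePkgTokenSpaceGuid.PcdExampleFlag|TRUE|BOOLEAN|0x00000001
+    #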
+ def GenPcds(self, ContainerFile):
+ Logger.Debug(2, "Generate %s ..." % TAB_PCDS)
+ PcdObj = self.DecParser.GetPcdSectionObject()
+ #
+ # Get all Pcds
+ #
+ PcdDeclarations = []
+ IterList = [
+ (TAB_PCDS_FIXED_AT_BUILD_NULL, 'FixedPcd'),
+ (TAB_PCDS_PATCHABLE_IN_MODULE_NULL, 'PatchPcd'),
+ (TAB_PCDS_FEATURE_FLAG_NULL, 'FeaturePcd'),
+ (TAB_PCDS_DYNAMIC_EX_NULL, 'PcdEx'),
+ (TAB_PCDS_DYNAMIC_NULL, 'Pcd')]
+
+ PromptStrList = []
+ HelpStrList = []
+ PcdErrStrList = []
+ # Initialize UniStrDict dictionary, top keys are language codes
+ UniStrDict = {}
+ StrList = []
+
+ Language = ''
+ if self.UniFileClassObject:
+ Language = TAB_LANGUAGE_EN_X
+ else:
+ Language = TAB_LANGUAGE_EN_US
+
+ if self.UniFileClassObject:
+ UniStrDict = self.UniFileClassObject.OrderedStringList
+ for Lang in UniStrDict:
+ for StringDefClassObject in UniStrDict[Lang]:
+ StrList = StringDefClassObject.StringName.split('_')
+ # StringName format is STR_<TOKENSPACECNAME>_<PCDCNAME>_PROMPT
+ if len(StrList) == 4 and StrList[0] == TAB_STR_TOKENCNAME and StrList[3] == TAB_STR_TOKENPROMPT:
+ PromptStrList.append((GetLanguageCode1766(Lang), StringDefClassObject.StringName, \
+ StringDefClassObject.StringValue))
+ # StringName format is STR_<TOKENSPACECNAME>_<PCDCNAME>_HELP
+ if len(StrList) == 4 and StrList[0] == TAB_STR_TOKENCNAME and StrList[3] == TAB_STR_TOKENHELP:
+ HelpStrList.append((GetLanguageCode1766(Lang), StringDefClassObject.StringName, \
+ StringDefClassObject.StringValue))
+ # StringName format is STR_<TOKENSPACECNAME>_ERR_##
+ if len(StrList) == 4 and StrList[0] == TAB_STR_TOKENCNAME and StrList[2] == TAB_STR_TOKENERR:
+ PcdErrStrList.append((GetLanguageCode1766(Lang), StringDefClassObject.StringName, \
+ StringDefClassObject.StringValue))
+ #
+ # For each PCD type
+ #
+ for PcdType, Type in IterList:
+            #
+            # Go through all PCD items of this type
+            #
+ for Item in PcdObj.GetPcdsByType(PcdType.upper()):
+ PcdDeclaration = GenPcdDeclaration(
+ ContainerFile,
+ (Item.TokenSpaceGuidCName, Item.TokenCName,
+ Item.DefaultValue, Item.DatumType, Item.TokenValue,
+ Type, Item.GetHeadComment(), Item.GetTailComment(), ''),
+ Language,
+ self.DecParser.GetDefineSectionMacro()
+ )
+ PcdDeclaration.SetSupArchList(Item.GetArchListOfType(PcdType))
+
+ #
+ # Get PCD error message from PCD error comment section in DEC file
+ #
+ for PcdErr in PcdDeclaration.GetPcdErrorsList():
+ if (PcdDeclaration.GetTokenSpaceGuidCName(), PcdErr.GetErrorNumber()) \
+ in self.DecParser.PcdErrorCommentDict:
+ Key = (PcdDeclaration.GetTokenSpaceGuidCName(), PcdErr.GetErrorNumber())
+ PcdErr.SetErrorMessageList(PcdErr.GetErrorMessageList() + \
+ [(Language, self.DecParser.PcdErrorCommentDict[Key])])
+
+ for Index in range(0, len(PromptStrList)):
+ StrNameList = PromptStrList[Index][1].split('_')
+ if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
+ StrNameList[2].lower() == Item.TokenCName.lower():
+ TxtObj = TextObject()
+ TxtObj.SetLang(PromptStrList[Index][0])
+ TxtObj.SetString(PromptStrList[Index][2])
+ for Prompt in PcdDeclaration.GetPromptList():
+ if Prompt.GetLang() == TxtObj.GetLang() and \
+ Prompt.GetString() == TxtObj.GetString():
+ break
+ else:
+ PcdDeclaration.SetPromptList(PcdDeclaration.GetPromptList() + [TxtObj])
+
+ for Index in range(0, len(HelpStrList)):
+ StrNameList = HelpStrList[Index][1].split('_')
+ if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
+ StrNameList[2].lower() == Item.TokenCName.lower():
+ TxtObj = TextObject()
+ TxtObj.SetLang(HelpStrList[Index][0])
+ TxtObj.SetString(HelpStrList[Index][2])
+ for HelpStrObj in PcdDeclaration.GetHelpTextList():
+ if HelpStrObj.GetLang() == TxtObj.GetLang() and \
+ HelpStrObj.GetString() == TxtObj.GetString():
+ break
+ else:
+ PcdDeclaration.SetHelpTextList(PcdDeclaration.GetHelpTextList() + [TxtObj])
+
+ #
+ # Get PCD error message from UNI file
+ #
+ for Index in range(0, len(PcdErrStrList)):
+ StrNameList = PcdErrStrList[Index][1].split('_')
+ if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
+ StrNameList[2].lower() == TAB_STR_TOKENERR.lower():
+ for PcdErr in PcdDeclaration.GetPcdErrorsList():
+ if PcdErr.GetErrorNumber().lower() == (TAB_HEX_START + StrNameList[3]).lower() and \
+ (PcdErrStrList[Index][0], PcdErrStrList[Index][2]) not in PcdErr.GetErrorMessageList():
+ PcdErr.SetErrorMessageList(PcdErr.GetErrorMessageList() + \
+ [(PcdErrStrList[Index][0], PcdErrStrList[Index][2])])
+
+ #
+                # Report an error if a PCD declares an error code but no
+                # matching error message was found.
+ #
+ for PcdErr in PcdDeclaration.GetPcdErrorsList():
+ if PcdErr.GetErrorNumber().strip():
+ if not PcdErr.GetErrorMessageList():
+ Logger.Error('UPT',
+ FORMAT_INVALID,
+ ST.ERR_DECPARSE_PCD_UNMATCHED_ERRORCODE % PcdErr.GetErrorNumber(),
+ ContainerFile,
+ PcdErr.GetLineNum(),
+ PcdErr.GetFileLine())
+
+ PcdDeclarations.append(PcdDeclaration)
+ self.SetPcdList(self.GetPcdList() + PcdDeclarations)
+ self.CheckPcdValue()
+
+ ##
+    # Get the English error message from a list of localized messages
+    # @param ErrorMessageList: List of (Language, Message) tuples
+    # @return: The 'en-US' message if present, otherwise the first English
+    #          variant, otherwise the first available message
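+    # Illustrative lookup (hypothetical messages):
+    #   [('ja-JP', 'Message A'), ('en-US', 'Message B')]  ->  'Message B'
+    #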
+ def GetEnErrorMessage(self, ErrorMessageList):
+ if self.FullPath:
+ pass
+ Lang = TAB_LANGUAGE_EN_US
+ for (Language, Message) in ErrorMessageList:
+ if Language == Lang:
+ return Message
+ for (Language, Message) in ErrorMessageList:
+ if Language.find(TAB_LANGUAGE_EN) >= 0:
+ return Message
+ else:
+ try:
+ return ErrorMessageList[0][1]
+ except IndexError:
+ return ''
+ return ''
+
+ ##
+    # Rewrite DEC expression operators so that the result can be evaluated
+    # by Python's eval()
+    # @param ReplaceValue: The expression string to rewrite
+    # @return: The rewritten expression, ready for eval()
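+    # Illustrative rewrites performed below (assuming IsRange=True):
+    #   'NOT LT 0x10'   ->  'x >= 0x10'
+    #   'GE 2 AND LE 8' ->  'x >= 2 and x <= 8'
+    #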
+ def ReplaceForEval(self, ReplaceValue, IsRange=False, IsExpr=False):
+ if self.FullPath:
+ pass
+ #
+ # deal with "NOT EQ", "NOT LT", "NOT GT", "NOT LE", "NOT GE", "NOT NOT"
+ #
+        NOTNOT_Pattern = r'[\t\s]*NOT[\t\s]+NOT[\t\s]*'
+        NOTGE_Pattern = r'[\t\s]*NOT[\t\s]+GE[\t\s]*'
+        NOTLE_Pattern = r'[\t\s]*NOT[\t\s]+LE[\t\s]*'
+        NOTGT_Pattern = r'[\t\s]*NOT[\t\s]+GT[\t\s]*'
+        NOTLT_Pattern = r'[\t\s]*NOT[\t\s]+LT[\t\s]*'
+        NOTEQ_Pattern = r'[\t\s]*NOT[\t\s]+EQ[\t\s]*'
+ ReplaceValue = re.compile(NOTNOT_Pattern).sub('', ReplaceValue)
+ ReplaceValue = re.compile(NOTLT_Pattern).sub('x >= ', ReplaceValue)
+ ReplaceValue = re.compile(NOTGT_Pattern).sub('x <= ', ReplaceValue)
+ ReplaceValue = re.compile(NOTLE_Pattern).sub('x > ', ReplaceValue)
+ ReplaceValue = re.compile(NOTGE_Pattern).sub('x < ', ReplaceValue)
+ ReplaceValue = re.compile(NOTEQ_Pattern).sub('x != ', ReplaceValue)
+
+ if IsRange:
+ ReplaceValue = ReplaceValue.replace('EQ', 'x ==')
+ ReplaceValue = ReplaceValue.replace('LT', 'x <')
+ ReplaceValue = ReplaceValue.replace('LE', 'x <=')
+ ReplaceValue = ReplaceValue.replace('GT', 'x >')
+ ReplaceValue = ReplaceValue.replace('GE', 'x >=')
+ ReplaceValue = ReplaceValue.replace('XOR', 'x ^')
+ elif IsExpr:
+ ReplaceValue = ReplaceValue.replace('EQ', '==')
+ ReplaceValue = ReplaceValue.replace('NE', '!=')
+ ReplaceValue = ReplaceValue.replace('LT', '<')
+ ReplaceValue = ReplaceValue.replace('LE', '<=')
+ ReplaceValue = ReplaceValue.replace('GT', '>')
+ ReplaceValue = ReplaceValue.replace('GE', '>=')
+ ReplaceValue = ReplaceValue.replace('XOR', '^')
+
+ ReplaceValue = ReplaceValue.replace('AND', 'and')
+ ReplaceValue = ReplaceValue.replace('&&', ' and ')
+ ReplaceValue = ReplaceValue.replace('xor', '^')
+ ReplaceValue = ReplaceValue.replace('OR', 'or')
+ ReplaceValue = ReplaceValue.replace('||', ' or ')
+ ReplaceValue = ReplaceValue.replace('NOT', 'not')
+        # use a bounded slice so that a trailing '!' cannot raise IndexError
+        if ReplaceValue.find('!') >= 0 and ReplaceValue[ReplaceValue.index('!') + 1:ReplaceValue.index('!') + 2] != '=':
+            ReplaceValue = ReplaceValue.replace('!', ' not ')
+ if '.' in ReplaceValue:
+            Pattern = r'[a-zA-Z0-9]{1,}\.[a-zA-Z0-9]{1,}'
+ MatchedList = re.findall(Pattern, ReplaceValue)
+ for MatchedItem in MatchedList:
+ if MatchedItem not in self.PcdDefaultValueDict:
+ Logger.Error("Dec File Parser", FORMAT_INVALID, Message=ST.ERR_DECPARSE_PCD_NODEFINED % MatchedItem,
+ File=self.FullPath)
+
+ ReplaceValue = ReplaceValue.replace(MatchedItem, self.PcdDefaultValueDict[MatchedItem])
+
+ return ReplaceValue
+
+ ##
+ # Check pcd's default value according to the pcd's description
+ #
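+    # Illustrative check (hypothetical PCD): a validation comment carrying
+    # the value range '2 - 8' is rewritten below into the expression
+    # '2 <= x <= 8' and evaluated with x bound to the PCD's default value;
+    # a mismatch is reported through Logger.Error
+    #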
+ def CheckPcdValue(self):
+ for Pcd in self.GetPcdList():
+ self.PcdDefaultValueDict[TAB_SPLIT.join((Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName())).strip()] = \
+ Pcd.GetDefaultValue()
+
+ for Pcd in self.GetPcdList():
+ ValidationExpressions = []
+ PcdGuidName = TAB_SPLIT.join((Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()))
+ Valids = Pcd.GetPcdErrorsList()
+ for Valid in Valids:
+ Expression = Valid.GetExpression()
+ if Expression:
+ #
+ # Delete the 'L' prefix of a quoted string, this operation is for eval()
+ #
+                    QUOTED_PATTERN = r'[\t\s]*L?"[^"]*"'
+ QuotedMatchedObj = re.search(QUOTED_PATTERN, Expression)
+ if QuotedMatchedObj:
+ MatchedStr = QuotedMatchedObj.group().strip()
+ if MatchedStr.startswith('L'):
+ Expression = Expression.replace(MatchedStr, MatchedStr[1:].strip())
+
+ Expression = self.ReplaceForEval(Expression, IsExpr=True)
+ Expression = Expression.replace(PcdGuidName, 'x')
+ Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
+ ValidationExpressions.append((Expression, Message))
+
+ ValidList = Valid.GetValidValue()
+ if ValidList:
+ ValidValue = 'x in %s' % [eval(v) for v in ValidList.split(' ') if v]
+ Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
+ ValidationExpressions.append((ValidValue, Message))
+
+ ValidValueRange = Valid.GetValidValueRange()
+ if ValidValueRange:
+ ValidValueRange = self.ReplaceForEval(ValidValueRange, IsRange=True)
+ if ValidValueRange.find('-') >= 0:
+ ValidValueRange = ValidValueRange.replace('-', '<= x <=')
+ elif not ValidValueRange.startswith('x ') and not ValidValueRange.startswith('not ') \
+ and not ValidValueRange.startswith('not(') and not ValidValueRange.startswith('('):
+ ValidValueRange = 'x %s' % ValidValueRange
+ Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
+ ValidationExpressions.append((ValidValueRange, Message))
+
+ DefaultValue = self.PcdDefaultValueDict[PcdGuidName.strip()]
+ #
+ # Delete the 'L' prefix of a quoted string, this operation is for eval()
+ #
+            QUOTED_PATTERN = r'[\t\s]*L?"[^"]*"'
+ QuotedMatchedObj = re.search(QUOTED_PATTERN, DefaultValue)
+ if QuotedMatchedObj:
+ MatchedStr = QuotedMatchedObj.group().strip()
+ if MatchedStr.startswith('L'):
+ DefaultValue = DefaultValue.replace(MatchedStr, MatchedStr[1:].strip())
+
+ try:
+ DefaultValue = eval(DefaultValue.replace('TRUE', 'True').replace('true', 'True')
+ .replace('FALSE', 'False').replace('false', 'False'))
+ except BaseException:
+ pass
+
+ for (Expression, Msg) in ValidationExpressions:
+ try:
+ if not eval(Expression, {'x':DefaultValue}):
+ Logger.Error("Dec File Parser", FORMAT_INVALID, ExtraData='%s, value = %s' %\
+ (PcdGuidName, DefaultValue), Message=Msg, File=self.FullPath)
+ except TypeError:
+ Logger.Error("Dec File Parser", FORMAT_INVALID, ExtraData=PcdGuidName, \
+ Message=Msg, File=self.FullPath)
+
+    ## GenModuleFileList
+    #
+    # Generate the list of module (INF) files under the package directory
+    #
+ def GenModuleFileList(self, ContainerFile):
+ ModuleFileList = []
+ ContainerFileName = os.path.basename(ContainerFile)
+ ContainerFilePath = os.path.dirname(ContainerFile)
+ for Item in GetFiles(ContainerFilePath,
+ ['CVS', '.svn'] + self.GetIncludePathList(), False):
+ ExtName = os.path.splitext(Item)[1]
+ if ExtName.lower() == '.inf':
+ ModuleFileList.append(Item)
+ elif ExtName.upper() == '.DEC' and self.CheckMulDec:
+ if Item == ContainerFileName:
+ continue
+ Logger.Error('MkPkg',
+ UPT_MUL_DEC_ERROR,
+ ST.ERR_MUL_DEC_ERROR%(ContainerFilePath,
+ ContainerFileName,
+ Item))
+
+ self.SetModuleFileList(ModuleFileList)
+
+ ## Show detailed information of Package
+ #
+ # Print all members and their values of Package class
+ #
+ def ShowPackage(self):
+ print('\nName =', self.GetName())
+ print('\nBaseName =', self.GetBaseName())
+ print('\nVersion =', self.GetVersion())
+ print('\nGuid =', self.GetGuid())
+
+ print('\nStandardIncludes = %d ' \
+ % len(self.GetStandardIncludeFileList()), end=' ')
+ for Item in self.GetStandardIncludeFileList():
+ print(Item.GetFilePath(), ' ', Item.GetSupArchList())
+ print('\nPackageIncludes = %d \n' \
+ % len(self.GetPackageIncludeFileList()), end=' ')
+ for Item in self.GetPackageIncludeFileList():
+ print(Item.GetFilePath(), ' ', Item.GetSupArchList())
+
+ print('\nGuids =', self.GetGuidList())
+ for Item in self.GetGuidList():
+ print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
+ print('\nProtocols =', self.GetProtocolList())
+ for Item in self.GetProtocolList():
+ print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
+ print('\nPpis =', self.GetPpiList())
+ for Item in self.GetPpiList():
+ print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
+ print('\nLibraryClasses =', self.GetLibraryClassList())
+ for Item in self.GetLibraryClassList():
+ print(Item.GetLibraryClass(), Item.GetRecommendedInstance(), \
+ Item.GetSupArchList())
+ print('\nPcds =', self.GetPcdList())
+ for Item in self.GetPcdList():
+ print('CName=', Item.GetCName(), 'TokenSpaceGuidCName=', \
+ Item.GetTokenSpaceGuidCName(), \
+ 'DefaultValue=', Item.GetDefaultValue(), \
+ 'ValidUsage=', Item.GetValidUsage(), \
+ 'SupArchList', Item.GetSupArchList(), \
+ 'Token=', Item.GetToken(), 'DatumType=', Item.GetDatumType())
+
+ for Item in self.GetMiscFileList():
+ print(Item.GetName())
+ for FileObjectItem in Item.GetFileList():
+ print(FileObjectItem.GetURI())
+ print('****************\n')
+
+## GenPcdDeclaration
+#
+# @param ContainerFile: File name of the DEC file
+# @param PcdInfo: Pcd information, of format (TokenGuidCName,
+# TokenName, Value, DatumType, Token, Type,
+# GenericComment, TailComment, Arch)
+# @param Language: The language of HelpText, Prompt
+#
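+# Illustrative call (hypothetical values):
+#   GenPcdDeclaration('Example.dec',
+#                     ('gExampleTokenSpaceGuid', 'PcdExampleFlag', 'TRUE',
+#                      'BOOLEAN', '0x00000001', 'FixedPcd', '', '', ''),
+#                     'en-US', {})
+#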
+def GenPcdDeclaration(ContainerFile, PcdInfo, Language, MacroReplaceDict):
+ HelpStr = ''
+ PromptStr = ''
+ TailHelpStr = ''
+ TokenGuidCName, TokenName, Value, DatumType, Token, Type, \
+ GenericComment, TailComment, Arch = PcdInfo
+ Pcd = PcdObject()
+ Pcd.SetCName(TokenName)
+ Pcd.SetToken(Token)
+ Pcd.SetTokenSpaceGuidCName(TokenGuidCName)
+ Pcd.SetDatumType(DatumType)
+ Pcd.SetDefaultValue(Value)
+ Pcd.SetValidUsage(Type)
+ #
+ # MaxDatumSize is required field for 'VOID*' PCD
+ #
+ if DatumType == TAB_PTR_TYPE_PCD:
+ Pcd.SetMaxDatumSize(ITEM_UNDEFINED)
+
+ SupArchList = [Arch]
+ Pcd.SetSupArchList(SupArchList)
+
+ if GenericComment:
+ HelpStr, PcdErrList, PromptStr = ParseDecPcdGenericComment(GenericComment,
+ ContainerFile,
+ TokenGuidCName,
+ TokenName,
+ MacroReplaceDict)
+ if PcdErrList:
+ Pcd.SetPcdErrorsList(PcdErrList)
+
+ if TailComment:
+ SupModuleList, TailHelpStr = ParseDecPcdTailComment(TailComment,
+ ContainerFile)
+ if SupModuleList:
+ Pcd.SetSupModuleList(SupModuleList)
+
+ if HelpStr and (not HelpStr.endswith('\n')) and TailHelpStr:
+ HelpStr += '\n'
+ HelpStr += TailHelpStr
+ if HelpStr:
+ HelpTxtObj = TextObject()
+ HelpTxtObj.SetLang(Language)
+ HelpTxtObj.SetString(HelpStr)
+ Pcd.SetHelpTextList([HelpTxtObj])
+ if PromptStr:
+ TxtObj = TextObject()
+ TxtObj.SetLang(Language)
+ TxtObj.SetString(PromptStr)
+ Pcd.SetPromptList([TxtObj])
+
+ return Pcd
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
new file mode 100755
index 00000000..184c36c6
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
@@ -0,0 +1,1071 @@
+## @file InfPomAlignment.py
+# This file contains the adapter to convert an INF parser object to a POM object
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+'''
+InfPomAlignment
+'''
+##
+# Import modules
+#
+import os.path
+from Logger import StringTable as ST
+import Logger.Log as Logger
+from Library.StringUtils import FORMAT_INVALID
+from Library.StringUtils import PARSER_ERROR
+from Library.StringUtils import NormPath
+from Library.StringUtils import GetSplitValueList
+from Library.Misc import ConvertVersionToDecimal
+from Library.Misc import GetHelpStringByRemoveHashKey
+from Library.Misc import ConvertArchList
+from Library.Misc import GetRelativePath
+from Library.Misc import PathClass
+from Library.Parsing import GetPkgInfoFromDec
+from Library.UniClassObject import UniFileClassObject
+from Library.UniClassObject import ConvertSpecialUnicodes
+from Library.UniClassObject import GetLanguageCode1766
+from Library import DataType as DT
+from Library import GlobalData
+from Library.ParserValidate import IsValidPath
+from Object.POM import CommonObject
+from Object.POM.ModuleObject import ModuleObject
+from Object.POM.ModuleObject import ExternObject
+from Object.POM.ModuleObject import HobObject
+from Object.POM.ModuleObject import EventObject
+from Object.POM.ModuleObject import BootModeObject
+from Object.POM.ModuleObject import PackageDependencyObject
+from Object.POM.ModuleObject import SourceFileObject
+from Object.POM.ModuleObject import DepexObject
+from Object.POM.ModuleObject import AsBuildLibraryClassObject
+from Object.POM.ModuleObject import AsBuiltObject
+from PomAdapter.InfPomAlignmentMisc import GenModuleHeaderUserExt
+from PomAdapter.InfPomAlignmentMisc import GenBinaryData
+from Parser import InfParser
+from PomAdapter.DecPomAlignment import DecPomAlignment
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+## InfPomAlignment
+#
+# Inherit from ModuleObject
+#
+class InfPomAlignment(ModuleObject):
+ ## Construct of InfPomAlignment
+    # Skip means that UPT does not care about the INF syntax; this applies to
+    # INF files that are not distributed during creation, or to INF files
+    # checked against the dependency rule during removal.
+ #
+ def __init__(self, FileName, WorkSpace=None, PackagePath='', Skip=False):
+ ModuleObject.__init__(self)
+ self.Parser = None
+ self.FileName = FileName
+ self.WorkSpace = WorkSpace
+ self.CombinePath = ''
+ self.LibModuleTypeList = []
+ self.FullPath = ''
+ self.ModulePath = ''
+ self.WorkspaceDir = " "
+ self.CustomMakefile = []
+ self.UniFileClassObject = None
+ self.SetPackagePath(PackagePath)
+ #
+ # Call GenInfPomObjects function to fill POM object.
+ #
+ if Skip:
+ OrigConfig = Logger.SUPRESS_ERROR
+ Logger.SUPRESS_ERROR = True
+ try:
+ self._GenInfPomObjects(Skip)
+ finally:
+ Logger.SUPRESS_ERROR = OrigConfig
+ else:
+ self._GenInfPomObjects(Skip)
+
+ ##
+ # Generate all POM objects, the original input comes
+ # from INF parser's output
+ #
+ def _GenInfPomObjects(self, Skip):
+ #
+ # Call INF Parser to get information from INF file
+ #
+ self.Parser = InfParser.InfParser(self.FileName, self.WorkSpace)
+ self.FullPath = self.Parser.FullPath
+ self.GetFullPath()
+ self._GenModuleHeader()
+ #
+ # Call GenBinaries after Module Header for Binary INF consideration.
+ #
+ self._GenBinaries()
+ self._GenBuildOptions()
+ self._GenLibraryClasses()
+ self._GenPackages(Skip)
+ self._GenPcds()
+ self._GenSources()
+ self._GenUserExtensions()
+ self._GenGuidProtocolPpis(DT.TAB_GUIDS)
+ self._GenGuidProtocolPpis(DT.TAB_PROTOCOLS)
+ self._GenGuidProtocolPpis(DT.TAB_PPIS)
+ self._GenDepexes()
+
+    ## GenModuleHeader
+    #
+    # Convert the [Defines] section content of the INF file into the module
+    # header properties of this POM object
+    #
+ def _GenModuleHeader(self):
+ Logger.Debug(2, "Generate ModuleHeader ...")
+ #
+        # Get all defines information from the InfParser object
+ #
+ RecordSet = self.Parser.InfDefSection.Defines
+ #
+ # Should only have one ArchString Item.
+ #
+ ArchString = list(RecordSet.keys())[0]
+ ArchList = GetSplitValueList(ArchString, ' ')
+ ArchList = ConvertArchList(ArchList)
+ HasCalledFlag = False
+ #
+ # Get data from Sdict()
+ #
+ ValueList = RecordSet[ArchString]
+ self.SetFileName(self.FileName)
+ self.SetFullPath(self.FullPath)
+ #
+ # The INF's filename (without the directory path or the extension)
+ # must be used for the value of the
+ # ModuleSurfaceArea.Header.Name element
+ #
+ self.SetName(os.path.splitext(os.path.basename(self.FileName))[0])
+ self.WorkspaceDir = " "
+ #
+ # CombinePath and ModulePath
+ #
+ CombinePath = GetRelativePath(self.FullPath, self.WorkSpace)
+ self.SetCombinePath(CombinePath)
+ ModulePath = os.path.split(CombinePath)[0]
+ ModuleRelativePath = ModulePath
+ if self.GetPackagePath() != '':
+ ModuleRelativePath = GetRelativePath(ModulePath, self.GetPackagePath())
+ self.SetModulePath(ModuleRelativePath)
+ #
+        # For Define Section Items.
+ #
+ DefineObj = ValueList
+ #
+ # Convert UEFI/PI version to decimal number
+ #
+ if DefineObj.GetUefiSpecificationVersion() is not None:
+ __UefiVersion = DefineObj.GetUefiSpecificationVersion().GetValue()
+ __UefiVersion = ConvertVersionToDecimal(__UefiVersion)
+ self.SetUefiSpecificationVersion(str(__UefiVersion))
+ if DefineObj.GetPiSpecificationVersion() is not None:
+ __PiVersion = DefineObj.GetPiSpecificationVersion().GetValue()
+ __PiVersion = ConvertVersionToDecimal(__PiVersion)
+
+ self.SetPiSpecificationVersion(str(__PiVersion))
+ SpecList = DefineObj.GetSpecification()
+ NewSpecList = []
+ for SpecItem in SpecList:
+ NewSpecList.append((SpecItem[0], ConvertVersionToDecimal(SpecItem[1])))
+ self.SetSpecList(NewSpecList)
+
+ #
+ # must exist items in INF define section
+ # MODULE_TYPE/BASE_NAME/INF_VERSION/FILE_GUID/VERSION_STRING
+ #
+ if DefineObj.GetModuleType() is None:
+ Logger.Error("InfParser", FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("MODULE_TYPE"), File=self.FullPath)
+ else:
+ self.SetModuleType(DefineObj.GetModuleType().GetValue())
+ ModuleType = DefineObj.GetModuleType().GetValue()
+ if ModuleType:
+ #
+                # Drivers and applications are not allowed to have a MODULE_TYPE of "BASE". Only
+                # libraries are permitted to have a MODULE_TYPE of "BASE".
+ #
+ if len(DefineObj.LibraryClass) == 0 and ModuleType == 'BASE':
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_MODULETYPE_INVALID,
+ File=self.FullPath,
+ Line=DefineObj.ModuleType.CurrentLine.LineNo,
+ ExtraData=DefineObj.ModuleType.CurrentLine.LineString)
+ self.LibModuleTypeList.append(ModuleType)
+ if DefineObj.GetBaseName() is None:
+ Logger.Error("InfParser", FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("BASE_NAME"), File=self.FullPath)
+ else:
+ self.SetBaseName(DefineObj.GetBaseName().GetValue())
+ if DefineObj.GetModuleUniFileName():
+ self.UniFileClassObject = UniFileClassObject([PathClass(DefineObj.GetModuleUniFileName())])
+ else:
+ self.UniFileClassObject = None
+ if DefineObj.GetInfVersion() is None:
+ Logger.Error("InfParser", FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("INF_VERSION"), File=self.FullPath)
+ else:
+ self.SetVersion(DefineObj.GetInfVersion().GetValue())
+ if DefineObj.GetFileGuid() is None:
+ Logger.Error("InfParser", FORMAT_INVALID,
+ ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("FILE_GUID"), File=self.FullPath)
+ else:
+ self.SetGuid(DefineObj.GetFileGuid().GetValue())
+ if DefineObj.GetVersionString() is None:
+ #
+ # VERSION_STRING is missing from the [Defines] section, tools must assume that the module's version is 0.
+ #
+ self.SetVersion('0')
+ else:
+ #
+ # Get version of INF
+ #
+ if DefineObj.GetVersionString().GetValue() != "":
+ #
+ # EDK2 inf
+ #
+ VersionString = DefineObj.GetVersionString().GetValue()
+ if len(VersionString) > 0:
+ VersionString = ConvertVersionToDecimal(VersionString)
+ self.SetVersion(VersionString)
+ else:
+ #
+ # EDK1 inf
+ #
+ Logger.Error("Parser", PARSER_ERROR, ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF, ExtraData=self.FullPath,
+ RaiseError=Logger.IS_RAISE_ERROR)
+ #
+        # If SHADOW is specified, the MODULE_TYPE must be one of
+        # SEC, PEI_CORE or PEIM
+ #
+ if DefineObj.GetShadow():
+ ModuleTypeValue = DefineObj.GetModuleType().GetValue()
+ if not (ModuleTypeValue == 'SEC' or ModuleTypeValue == 'PEI_CORE' or ModuleTypeValue == 'PEIM'):
+ Logger.Error("InfParser", FORMAT_INVALID, ST.ERR_INF_PARSER_DEFINE_SHADOW_INVALID, File=self.FullPath)
+
+ if DefineObj.GetPcdIsDriver() is not None:
+ self.SetPcdIsDriver(DefineObj.GetPcdIsDriver().GetValue())
+ #
+ # LIBRARY_CLASS
+ #
+ self._GenModuleHeaderLibClass(DefineObj, ArchList)
+ #
+ # CUSTOM_MAKEFILE
+ #
+ self.CustomMakefile = DefineObj.GetCustomMakefile()
+ #
+ # Externs in Defines section
+ # Only one define section, so just call once.
+ #
+ if not HasCalledFlag:
+ self._GenModuleHeaderExterns(DefineObj)
+ HasCalledFlag = True
+ #
+ # each module has only one module header
+ #
+ self.SetSupArchList(ArchList)
+ #
+ # Get Hob/BootMode/EventList information
+ #
+ self._GenSpecialComments()
+ #
+ # put all define statement into user-extension sections
+ #
+ DefinesDictNew = GenModuleHeaderUserExt(DefineObj, ArchString)
+ if DefinesDictNew:
+ UserExtension = CommonObject.UserExtensionObject()
+ UserExtension.SetDefinesDict(DefinesDictNew)
+ UserExtension.SetIdentifier('DefineModifiers')
+ UserExtension.SetUserID('EDK2')
+ self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
+ #
+ # Get all meta-file header information
+ # the record is list of items formatted:
+ # [LineValue, Arch, StartLine, ID, Third]
+ #
+ InfHeaderObj = self.Parser.InfHeader
+ #
+ # Put header information into POM object
+ #
+ if self.UniFileClassObject:
+ Lang = DT.TAB_LANGUAGE_EN_X
+ else:
+ Lang = DT.TAB_LANGUAGE_EN_US
+ if InfHeaderObj.GetAbstract():
+ self.SetAbstract((Lang, InfHeaderObj.GetAbstract()))
+ if InfHeaderObj.GetDescription():
+ self.SetDescription((Lang, InfHeaderObj.GetDescription()))
+ if InfHeaderObj.GetCopyright():
+ self.SetCopyright(('', InfHeaderObj.GetCopyright()))
+ if InfHeaderObj.GetLicense():
+ self.SetLicense(('', InfHeaderObj.GetLicense()))
+ #
+ # Put Binary header information into POM object
+ #
+ InfBinaryHeaderObj = self.Parser.InfBinaryHeader
+ if InfBinaryHeaderObj.GetAbstract():
+ self.SetBinaryHeaderAbstract((Lang, InfBinaryHeaderObj.GetAbstract()))
+ if InfBinaryHeaderObj.GetDescription():
+ self.SetBinaryHeaderDescription((Lang, InfBinaryHeaderObj.GetDescription()))
+ if InfBinaryHeaderObj.GetCopyright():
+ self.SetBinaryHeaderCopyright(('', InfBinaryHeaderObj.GetCopyright()))
+ if InfBinaryHeaderObj.GetLicense():
+ self.SetBinaryHeaderLicense(('', InfBinaryHeaderObj.GetLicense()))
+
+ ## GenModuleHeaderLibClass
+ #
+ #
+ def _GenModuleHeaderLibClass(self, DefineObj, ArchList):
+ LibraryList = DefineObj.GetLibraryClass()
+ for LibraryItem in LibraryList:
+ Lib = CommonObject.LibraryClassObject()
+ Lib.SetLibraryClass(LibraryItem.GetLibraryName())
+ Lib.SetUsage(DT.USAGE_ITEM_PRODUCES)
+ SupModuleList = LibraryItem.GetTypes()
+ self.LibModuleTypeList += SupModuleList
+ Lib.SetSupModuleList(SupModuleList)
+ Lib.SetSupArchList(ArchList)
+ self.SetLibraryClassList(self.GetLibraryClassList() + [Lib])
+ self.SetIsLibrary(True)
+ self.SetIsLibraryModList(self.GetIsLibraryModList() + SupModuleList)
+
+ ## GenModuleHeaderExterns
+ #
+ #
+ def _GenModuleHeaderExterns(self, DefineObj):
+ EntryPointList = DefineObj.GetEntryPoint()
+ for EntryPoint in EntryPointList:
+ Image = ExternObject()
+ Image.SetEntryPoint(EntryPoint.GetCName())
+ #
+ # Future enhancement
+ #
+ self.SetExternList(self.GetExternList() + [Image])
+ #
+ # UNLOAD_IMAGE
+ #
+ UnloadImageList = DefineObj.GetUnloadImages()
+ for UnloadImage in UnloadImageList:
+ Image = ExternObject()
+ #
+ # Future enhancement
+ #
+ Image.SetUnloadImage(UnloadImage.GetCName())
+ self.SetExternList(self.GetExternList() + [Image])
+ #
+ # CONSTRUCTOR
+ #
+ ConstructorList = DefineObj.GetConstructor()
+ for ConstructorItem in ConstructorList:
+ Image = ExternObject()
+ #
+ # Future enhancement
+ #
+ Image.SetConstructor(ConstructorItem.GetCName())
+ self.SetExternList(self.GetExternList() + [Image])
+ #
+ # DESTRUCTOR
+ #
+ DestructorList = DefineObj.GetDestructor()
+ for DestructorItem in DestructorList:
+ Image = ExternObject()
+ #
+ # Future enhancement
+ #
+ Image.SetDestructor(DestructorItem.GetCName())
+ self.SetExternList(self.GetExternList() + [Image])
+
+    ## GenSpecialComments
+ # BootMode/HOB/Event
+ #
+ def _GenSpecialComments(self):
+ SpecialCommentsList = self.Parser.InfSpecialCommentSection.GetSpecialComments()
+ for Key in SpecialCommentsList:
+ if Key == DT.TYPE_HOB_SECTION:
+ HobList = []
+ for Item in SpecialCommentsList[Key]:
+ Hob = HobObject()
+ Hob.SetHobType(Item.GetHobType())
+ Hob.SetUsage(Item.GetUsage())
+ Hob.SetSupArchList(Item.GetSupArchList())
+ if Item.GetHelpString():
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(Item.GetHelpString())
+ Hob.SetHelpTextList([HelpTextObj])
+ HobList.append(Hob)
+ self.SetHobList(HobList)
+ elif Key == DT.TYPE_EVENT_SECTION:
+ EventList = []
+ for Item in SpecialCommentsList[Key]:
+ Event = EventObject()
+ Event.SetEventType(Item.GetEventType())
+ Event.SetUsage(Item.GetUsage())
+ if Item.GetHelpString():
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(Item.GetHelpString())
+ Event.SetHelpTextList([HelpTextObj])
+ EventList.append(Event)
+ self.SetEventList(EventList)
+ elif Key == DT.TYPE_BOOTMODE_SECTION:
+ BootModeList = []
+ for Item in SpecialCommentsList[Key]:
+ BootMode = BootModeObject()
+ BootMode.SetSupportedBootModes(Item.GetSupportedBootModes())
+ BootMode.SetUsage(Item.GetUsage())
+ if Item.GetHelpString():
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(Item.GetHelpString())
+ BootMode.SetHelpTextList([HelpTextObj])
+ BootModeList.append(BootMode)
+ self.SetBootModeList(BootModeList)
+
+ ## GenBuildOptions
+ #
+ # Gen BuildOptions of Inf
+ # [<Family>:]<ToolFlag>=Flag
+ #
+ #
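+    # Illustrative INF [BuildOptions] entry (hypothetical flag):
+    #   MSFT:*_*_*_CC_FLAGS = /D EXAMPLE_MACRO
+    #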
+ def _GenBuildOptions(self):
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_BUILD_OPTIONS)
+ #
+ # Get all BuildOptions
+ #
+ BuildOptionsList = self.Parser.InfBuildOptionSection.GetBuildOptions()
+ if not GlobalData.gIS_BINARY_INF:
+ BuildOptionDict = {}
+ for BuildOptionObj in BuildOptionsList:
+ ArchList = BuildOptionObj.GetSupArchList()
+ ArchList = ConvertArchList(ArchList)
+ BuildOptionsContent = BuildOptionObj.GetContent()
+ ArchString = ' '.join(ArchList)
+ if not BuildOptionsContent:
+ continue
+ BuildOptionDict[ArchString] = BuildOptionsContent
+ if not BuildOptionDict:
+ return
+ UserExtension = CommonObject.UserExtensionObject()
+ UserExtension.SetBuildOptionDict(BuildOptionDict)
+ UserExtension.SetIdentifier('BuildOptionModifiers')
+ UserExtension.SetUserID('EDK2')
+ self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
+ else:
+ #
+            # Do not process this information here; it is handled in _GenBinaries()
+ #
+ pass
+
+ ## GenLibraryClasses
+ #
+ # Get LibraryClass of Inf
+ # <LibraryClassKeyWord>|<LibraryInstance>
+ #
+ #
+ def _GenLibraryClasses(self):
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_LIBRARY_CLASSES)
+ if not GlobalData.gIS_BINARY_INF:
+ #
+ # Get all LibraryClasses
+ #
+ for LibraryClassData in self.Parser.InfLibraryClassSection.LibraryClasses.values():
+ for Item in LibraryClassData:
+ LibraryClass = CommonObject.LibraryClassObject()
+ LibraryClass.SetUsage(DT.USAGE_ITEM_CONSUMES)
+ LibraryClass.SetLibraryClass(Item.GetLibName())
+ LibraryClass.SetRecommendedInstance(None)
+ LibraryClass.SetFeatureFlag(Item.GetFeatureFlagExp())
+ LibraryClass.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
+ LibraryClass.SetSupModuleList(Item.GetSupModuleList())
+ HelpStringObj = Item.GetHelpString()
+ if HelpStringObj is not None:
+ CommentString = GetHelpStringByRemoveHashKey(HelpStringObj.HeaderComments +
+ HelpStringObj.TailComments)
+ HelpTextHeaderObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextHeaderObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextHeaderObj.SetString(CommentString)
+ LibraryClass.SetHelpTextList([HelpTextHeaderObj])
+ self.SetLibraryClassList(self.GetLibraryClassList() + [LibraryClass])
+
+ ## GenPackages
+ #
+ # Gen Packages of Inf
+ #
+ #
+    # @param Skip: If True, skip packages whose GUID/version cannot be found
+ #
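+    # Illustrative INF [Packages] entry (hypothetical package):
+    #   ExamplePkg/ExamplePkg.dec
+    #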
+ def _GenPackages(self, Skip):
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_PACKAGES)
+ #
+ # Get all Packages
+ #
+ PackageObj = self.Parser.InfPackageSection.Packages
+ #
+ # Go through each arch
+ #
+ for PackageItemObj in PackageObj:
+ #
+ # Need package information for dependency check usage
+ #
+ PackageDependency = PackageDependencyObject()
+ PackageDependency.SetPackageFilePath(NormPath(PackageItemObj.GetPackageName()))
+ PackageDependency.SetSupArchList(ConvertArchList(PackageItemObj.GetSupArchList()))
+ PackageDependency.SetFeatureFlag(PackageItemObj.GetFeatureFlagExp())
+
+ PkgInfo = GetPkgInfoFromDec(mws.join(self.WorkSpace, NormPath(PackageItemObj.GetPackageName())))
+ if PkgInfo[1] and PkgInfo[2]:
+ PackageDependency.SetGuid(PkgInfo[1])
+ PackageDependency.SetVersion(PkgInfo[2])
+ elif Skip:
+ continue
+ else:
+ Logger.Error("\nUPT", PARSER_ERROR,
+ ST.ERR_INF_GET_PKG_DEPENDENCY_FAIL % PackageItemObj.GetPackageName(), File=self.FullPath)
+
+ PackageDependencyList = self.GetPackageDependencyList()
+ PackageDependencyList.append(PackageDependency)
+ self.SetPackageDependencyList(PackageDependencyList)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Inf
+ # <TokenSpaceGuidCName>.<PcdCName>[|<Value> [|<FFE>]]
+ #
+ #
+ def _GenPcds(self):
+ if not GlobalData.gIS_BINARY_INF:
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_PCDS)
+ #
+ # Get all Pcds
+ #
+ PcdObj = self.Parser.InfPcdSection.Pcds
+ KeysList = PcdObj.keys()
+ #
+ # Go through each arch
+ #
+ for (PcdType, PcdKey) in KeysList:
+ PcdData = PcdObj[PcdType, PcdKey]
+ for PcdItemObj in PcdData:
+ CommentList = PcdItemObj.GetHelpStringList()
+ if CommentList:
+ for CommentItem in CommentList:
+ Pcd = CommonObject.PcdObject()
+ Pcd.SetCName(PcdItemObj.GetCName())
+ Pcd.SetTokenSpaceGuidCName(PcdItemObj.GetTokenSpaceGuidCName())
+ Pcd.SetDefaultValue(PcdItemObj.GetDefaultValue())
+ Pcd.SetItemType(PcdType)
+ Pcd.SetValidUsage(CommentItem.GetUsageItem())
+ Pcd.SetFeatureFlag(PcdItemObj.GetFeatureFlagExp())
+ Pcd.SetSupArchList(ConvertArchList(PcdItemObj.GetSupportArchList()))
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(CommentItem.GetHelpStringItem())
+ Pcd.SetHelpTextList([HelpTextObj])
+ PcdList = self.GetPcdList()
+ PcdList.append(Pcd)
+ self.SetPcdList(PcdList)
+
+ ## GenSources
+ #
+ # Gen Sources of Inf
+ # <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+ #
+ #
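+    # Illustrative INF [Sources] entries (hypothetical files):
+    #   Example.c
+    #   ExampleIa32.asm|MSFT
+    #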
+ def _GenSources(self):
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_SOURCES)
+
+ #
+ # Get all SourceFiles
+ #
+ SourceObj = self.Parser.InfSourcesSection.Sources
+ DataList = SourceObj.keys()
+ #
+ # Go through each arch
+ #
+ SourceList = []
+ for Key in DataList:
+ SourceData = SourceObj[Key]
+ for Item in SourceData:
+ SourceFile = Item.GetSourceFileName()
+ Family = Item.GetFamily()
+ FeatureFlag = Item.GetFeatureFlagExp()
+ SupArchList = sorted(ConvertArchList(Item.GetSupArchList()))
+ Source = SourceFileObject()
+ Source.SetSourceFile(SourceFile)
+ Source.SetFamily(Family)
+ Source.SetFeatureFlag(FeatureFlag)
+ Source.SetSupArchList(SupArchList)
+ SourceList.append(Source)
+
+ self.SetSourceFileList(self.GetSourceFileList() + SourceList)
+
+
+ ## GenUserExtensions
+ #
+ # Gen UserExtensions of Inf
+ #
+ def _GenUserExtensions(self):
+ #
+ # UserExtensions
+ #
+ UserExtensionObj = self.Parser.InfUserExtensionSection.UserExtension
+ Keys = UserExtensionObj.keys()
+
+ for Key in Keys:
+ UserExtensionData = UserExtensionObj[Key]
+ for UserExtensionDataObj in UserExtensionData:
+ UserExtension = CommonObject.UserExtensionObject()
+ UserId = UserExtensionDataObj.GetUserId()
+ if UserId.startswith('"') and UserId.endswith('"'):
+ UserId = UserId[1:-1]
+ UserExtension.SetUserID(UserId)
+ Identifier = UserExtensionDataObj.GetIdString()
+ if Identifier.startswith('"') and Identifier.endswith('"'):
+ Identifier = Identifier[1:-1]
+ #
+ # Generate miscellaneous files on INF file
+ #
+ if UserId == 'TianoCore' and Identifier == 'ExtraFiles':
+ self._GenMiscFiles(UserExtensionDataObj.GetContent())
+ UserExtension.SetIdentifier(Identifier)
+ UserExtension.SetStatement(UserExtensionDataObj.GetContent())
+ UserExtension.SetSupArchList(ConvertArchList(UserExtensionDataObj.GetSupArchList()))
+ self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
+
+ #
+ # Gen UserExtensions of TianoCore."BinaryHeader"
+ #
+
+        # Get Binary header from INF file
+ BinaryAbstractList = self.BinaryHeaderAbstractList
+ BinaryDescriptionList = self.BinaryHeaderDescriptionList
+ BinaryCopyrightList = self.BinaryHeaderCopyrightList
+ BinaryLicenseList = self.BinaryHeaderLicenseList
+        # Get Binary header from UNI file
+ # Initialize UniStrDict, the top keys are language codes
+ UniStrDict = {}
+ if self.UniFileClassObject:
+ UniStrDict = self.UniFileClassObject.OrderedStringList
+ for Lang in UniStrDict:
+ for StringDefClassObject in UniStrDict[Lang]:
+ Lang = GetLanguageCode1766(Lang)
+ if StringDefClassObject.StringName == DT.TAB_INF_BINARY_ABSTRACT:
+ BinaryAbstractList.append((Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+ if StringDefClassObject.StringName == DT.TAB_INF_BINARY_DESCRIPTION:
+ BinaryDescriptionList.append((Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+ if BinaryAbstractList or BinaryDescriptionList or BinaryCopyrightList or BinaryLicenseList:
+ BinaryUserExtension = CommonObject.UserExtensionObject()
+ BinaryUserExtension.SetBinaryAbstract(BinaryAbstractList)
+ BinaryUserExtension.SetBinaryDescription(BinaryDescriptionList)
+ BinaryUserExtension.SetBinaryCopyright(BinaryCopyrightList)
+ BinaryUserExtension.SetBinaryLicense(BinaryLicenseList)
+ BinaryUserExtension.SetIdentifier(DT.TAB_BINARY_HEADER_IDENTIFIER)
+ BinaryUserExtension.SetUserID(DT.TAB_BINARY_HEADER_USERID)
+ self.SetUserExtensionList(self.GetUserExtensionList() + [BinaryUserExtension])
+
+ def _GenDepexesList(self, SmmDepexList, DxeDepexList, PeiDepexList):
+ if SmmDepexList:
+ self.SetSmmDepex(SmmDepexList)
+ if DxeDepexList:
+ self.SetDxeDepex(DxeDepexList)
+ if PeiDepexList:
+ self.SetPeiDepex(PeiDepexList)
+
+ ## GenDepexes
+ #
+ # Gen Depex of Inf
+ #
+ #
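+    # Illustrative INF [Depex] entry (hypothetical GUID C name):
+    #   [Depex.common.DXE_DRIVER]
+    #     gExampleProtocolGuid
+    #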
+ def _GenDepexes(self):
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_DEPEX)
+
+ PEI_LIST = [DT.SUP_MODULE_PEIM]
+ SMM_LIST = [DT.SUP_MODULE_DXE_SMM_DRIVER]
+ DXE_LIST = [DT.SUP_MODULE_DXE_DRIVER, DT.SUP_MODULE_DXE_SAL_DRIVER,
+ DT.SUP_MODULE_DXE_RUNTIME_DRIVER]
+
+ IsLibraryClass = self.GetIsLibrary()
+ #
+ # Get all Depexes
+ #
+ DepexData = self.Parser.InfDepexSection.GetDepex()
+ SmmDepexList = []
+ DxeDepexList = []
+ PeiDepexList = []
+ for Depex in DepexData:
+ ModuleType = Depex.GetModuleType()
+ ModuleTypeList = []
+ if IsLibraryClass:
+ if self.GetModuleType() == 'BASE' and not ModuleType:
+ Logger.Error("\nMkPkg", PARSER_ERROR,
+ ST.ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_BASE_LIBRARY_CLASS,
+ self.GetFullPath(), RaiseError=True)
+ if self.GetModuleType() != 'BASE' and not self.GetIsLibraryModList():
+ Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_LIBRARY_CLASS,
+ self.GetFullPath(), RaiseError=True)
+ if self.GetModuleType() != 'BASE' and ModuleType and ModuleType not in self.GetIsLibraryModList():
+ Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED,
+ self.GetFullPath(), RaiseError=True)
+ if ModuleType:
+ ModuleTypeList = [ModuleType]
+ else:
+ for ModuleTypeInList in self.GetIsLibraryModList():
+ if ModuleTypeInList in DT.VALID_DEPEX_MODULE_TYPE_LIST:
+ ModuleTypeList.append(ModuleTypeInList)
+ if not ModuleTypeList:
+ Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED,
+ self.GetFullPath(), RaiseError=True)
+ else:
+ if not ModuleType:
+ ModuleType = self.ModuleType
+ if ModuleType not in DT.VALID_DEPEX_MODULE_TYPE_LIST:
+ Logger.Error("\nMkPkg", PARSER_ERROR,
+ ST.ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR % (ModuleType),
+ self.GetFullPath(), RaiseError=True)
+ if ModuleType != self.ModuleType:
+ Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED,
+ self.GetFullPath(), RaiseError=True)
+ ModuleTypeList = [ModuleType]
+ for ModuleType in ModuleTypeList:
+ DepexIns = DepexObject()
+ DepexIns.SetDepex(Depex.GetDepexContent())
+ if IsLibraryClass:
+ DepexIns.SetModuleType(ModuleType)
+ else:
+ if Depex.GetModuleType():
+ DepexIns.SetModuleType(Depex.GetModuleType())
+ DepexIns.SetSupArchList(ConvertArchList([Depex.GetSupArch()]))
+ DepexIns.SetFeatureFlag(Depex.GetFeatureFlagExp())
+ if Depex.HelpString:
+ HelpIns = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpIns.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpIns.SetString(GetHelpStringByRemoveHashKey(Depex.HelpString))
+ DepexIns.SetHelpText(HelpIns)
+
+ if ModuleType in SMM_LIST:
+ SmmDepexList.append(DepexIns)
+ if ModuleType in DXE_LIST:
+ DxeDepexList.append(DepexIns)
+ if ModuleType in PEI_LIST:
+ PeiDepexList.append(DepexIns)
+ if ModuleType == DT.SUP_MODULE_UEFI_DRIVER:
+ if IsLibraryClass:
+ DxeDepexList.append(DepexIns)
+ else:
+ Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_DRIVER,
+ self.GetFullPath(), RaiseError=True)
+
+            # End of "for ModuleType in ModuleTypeList"
+ self._GenDepexesList(SmmDepexList, DxeDepexList, PeiDepexList)
+        # End of "for Depex in DepexData"
+
+ ## GenBinaries
+ #
+ # Gen Binary of Inf, must be called after Pcd/Library is generated
+ # <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
+ #
+ #
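+    # Illustrative INF [Binaries] entry (hypothetical file):
+    #   PE32|ExampleDriver.efi|*
+    #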
+ def _GenBinaries(self):
+ Logger.Debug(2, "Generate %s ..." % DT.TAB_BINARIES)
+ BinariesDict = {}
+
+ #
+ # Get all Binary data
+ #
+ BinaryObj = self.Parser.InfBinariesSection.GetBinary()
+
+ BinaryData = BinaryObj.keys()
+
+ #
+ # If the INF file does not contain a [Sources] section, and the INF file does contain a [Binaries] section,
+ # then the ModuleSurfaceArea.BinaryModule attribute must be set to true. Otherwise, do not use the attribute
+ #
+ if BinaryObj and not self.Parser.InfSourcesSection.GetSources():
+ self.BinaryModule = True
+ else:
+ self.BinaryModule = False
+
+ BinaryFileObjectList = []
+ AsBuildLibraryClassList = []
+ AsBuildBuildOptionList = []
+ AsBuildIns = AsBuiltObject()
+ #
+ # Library AsBuild Info
+ #
+ for LibItem in self.Parser.InfLibraryClassSection.GetLibraryClasses():
+ AsBuildLibIns = AsBuildLibraryClassObject()
+ AsBuildLibIns.SetLibGuid(LibItem.GetFileGuid())
+ AsBuildLibIns.SetLibVersion(LibItem.GetVersion())
+ AsBuildLibIns.SetSupArchList(LibItem.GetSupArchList())
+ AsBuildLibraryClassList.append(AsBuildLibIns)
+ AsBuildIns.SetLibraryInstancesList(AsBuildLibraryClassList)
+
+ #
+ # BuildOption AsBuild Info
+ #
+ for BuildOptionItem in self.Parser.InfBuildOptionSection.GetBuildOptions():
+ AsBuildBuildOptionList.append(BuildOptionItem)
+ AsBuildIns.SetBuildFlagsList(AsBuildBuildOptionList)
+
+ #
+ # PatchPcd and PcdEx
+ #
+ AsBuildIns = self._GenAsBuiltPcds(self.Parser.InfPcdSection.GetPcds(), AsBuildIns)
+
+ #
+        # Parse the DEC files that declare the GUID value of the GUID CName used by
+        # SUBTYPE_GUID type binary files in the [Binaries] section of the INF file
+ #
+ DecObjList = []
+ if not self.PackagePath:
+ WorkSpace = os.path.normpath(self.WorkSpace)
+ TempPath = ModulePath = os.path.normpath(self.ModulePath)
+ while ModulePath:
+ TempPath = ModulePath
+ ModulePath = os.path.dirname(ModulePath)
+ PackageName = TempPath
+ DecFilePath = os.path.normpath(os.path.join(WorkSpace, PackageName))
+ if DecFilePath:
+ for File in os.listdir(DecFilePath):
+ if File.upper().endswith('.DEC'):
+ DecFileFullPath = os.path.normpath(os.path.join(DecFilePath, File))
+ DecObjList.append(DecPomAlignment(DecFileFullPath, self.WorkSpace))
+
+
+ BinariesDict, AsBuildIns, BinaryFileObjectList = GenBinaryData(BinaryData, BinaryObj,
+ BinariesDict,
+ AsBuildIns,
+ BinaryFileObjectList,
+ self.GetSupArchList(),
+ self.BinaryModule,
+ DecObjList)
+
+ BinariesDict2 = {}
+ for Key in BinariesDict:
+ ValueList = BinariesDict[Key]
+ if len(ValueList) > 1:
+ BinariesDict2[Key] = ValueList
+ else:
+ #
+                # if there is no Target, Family, TagName or HelpStr,
+                # then they do not need to be put into the user extension
+ #
+ (Target, Family, TagName, HelpStr) = ValueList[0]
+ if not (Target or Family or TagName or HelpStr):
+ continue
+ else:
+ BinariesDict2[Key] = ValueList
+
+ self.SetBinaryFileList(self.GetBinaryFileList() + BinaryFileObjectList)
+
+ if BinariesDict2:
+ UserExtension = CommonObject.UserExtensionObject()
+ UserExtension.SetBinariesDict(BinariesDict2)
+ UserExtension.SetIdentifier('BinaryFileModifiers')
+ UserExtension.SetUserID('EDK2')
+ self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
+
+ ## GenAsBuiltPcds
+ #
+ #
+ def _GenAsBuiltPcds(self, PcdList, AsBuildIns):
+ AsBuildPatchPcdList = []
+ AsBuildPcdExList = []
+ #
+ # Pcd AsBuild Info
+ #
+ for PcdItem in PcdList:
+ if PcdItem[0].upper() == DT.TAB_INF_PATCH_PCD.upper():
+ PcdItemObj = PcdItem[1]
+ Pcd = CommonObject.PcdObject()
+ Pcd.SetCName(PcdItemObj.GetCName())
+ Pcd.SetTokenSpaceGuidCName(PcdItemObj.GetTokenSpaceGuidCName())
+ if PcdItemObj.GetTokenSpaceGuidValue() == '' and self.BinaryModule:
+ Logger.Error("\nMkPkg",
+ PARSER_ERROR,
+ ST.ERR_ASBUILD_PCD_TOKENSPACE_GUID_VALUE_MISS % \
+ (PcdItemObj.GetTokenSpaceGuidCName()),
+ self.GetFullPath(), RaiseError=True)
+ else:
+ Pcd.SetTokenSpaceGuidValue(PcdItemObj.GetTokenSpaceGuidValue())
+ if (PcdItemObj.GetToken() == '' or PcdItemObj.GetDatumType() == '') and self.BinaryModule:
+ Logger.Error("\nMkPkg",
+ PARSER_ERROR,
+ ST.ERR_ASBUILD_PCD_DECLARITION_MISS % \
+ (PcdItemObj.GetTokenSpaceGuidCName() + '.' + PcdItemObj.GetCName()),
+ self.GetFullPath(), RaiseError=True)
+ Pcd.SetToken(PcdItemObj.GetToken())
+ Pcd.SetDatumType(PcdItemObj.GetDatumType())
+ Pcd.SetMaxDatumSize(PcdItemObj.GetMaxDatumSize())
+ Pcd.SetDefaultValue(PcdItemObj.GetDefaultValue())
+ Pcd.SetOffset(PcdItemObj.GetOffset())
+ Pcd.SetItemType(PcdItem[0])
+ Pcd.SetFeatureFlag(PcdItemObj.GetFeatureFlagExp())
+ Pcd.SetSupArchList(ConvertArchList(PcdItemObj.GetSupportArchList()))
+ Pcd.SetValidUsage(PcdItemObj.GetValidUsage())
+ for CommentItem in PcdItemObj.GetHelpStringList():
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(CommentItem.GetHelpStringItem())
+ Pcd.SetHelpTextList(Pcd.GetHelpTextList() + [HelpTextObj])
+ AsBuildPatchPcdList.append(Pcd)
+ elif PcdItem[0].upper() == DT.TAB_INF_PCD_EX.upper():
+ PcdItemObj = PcdItem[1]
+ Pcd = CommonObject.PcdObject()
+ Pcd.SetTokenSpaceGuidValue(PcdItemObj.GetTokenSpaceGuidValue())
+ Pcd.SetToken(PcdItemObj.GetToken())
+ Pcd.SetDatumType(PcdItemObj.GetDatumType())
+ Pcd.SetMaxDatumSize(PcdItemObj.GetMaxDatumSize())
+ Pcd.SetDefaultValue(PcdItemObj.GetDefaultValue())
+ Pcd.SetItemType(PcdItem[0])
+ Pcd.SetFeatureFlag(PcdItemObj.GetFeatureFlagExp())
+ Pcd.SetSupArchList(ConvertArchList(PcdItemObj.GetSupportArchList()))
+ Pcd.SetValidUsage(PcdItemObj.GetValidUsage())
+ for CommentItem in PcdItemObj.GetHelpStringList():
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(CommentItem.GetHelpStringItem())
+ Pcd.SetHelpTextList(Pcd.GetHelpTextList() + [HelpTextObj])
+ AsBuildPcdExList.append(Pcd)
+ AsBuildIns.SetPatchPcdList(AsBuildPatchPcdList)
+ AsBuildIns.SetPcdExList(AsBuildPcdExList)
+
+ return AsBuildIns
+
+ ## GenGuidProtocolPpis
+ #
+ # Gen Guids/Protocol/Ppis of INF
+ # <CName>=<GuidValue>
+ #
+ def _GenGuidProtocolPpis(self, Type):
+ Logger.Debug(2, "Generate %s ..." % Type)
+ #
+ # Get all Guid/Protocol/Ppis data
+ #
+ GuidObj = self.Parser.InfGuidSection.GetGuid()
+ ProtocolObj = self.Parser.InfProtocolSection.GetProtocol()
+ PpisObj = self.Parser.InfPpiSection.GetPpi()
+
+ GuidProtocolPpiList = []
+
+ if Type == DT.TAB_GUIDS:
+ GuidData = GuidObj.keys()
+ for Item in GuidData:
+ CommentList = Item.GetCommentList()
+ #
+            # Depending on the CommentList content,
+            # generate multiple GUID objects
+ #
+ if CommentList:
+ for GuidComentItem in CommentList:
+ ListObject = CommonObject.GuidObject()
+ ListObject.SetGuidTypeList([GuidComentItem.GetGuidTypeItem()])
+ ListObject.SetVariableName(GuidComentItem.GetVariableNameItem())
+ ListObject.SetUsage(GuidComentItem.GetUsageItem())
+ ListObject.SetName(Item.GetName())
+ ListObject.SetCName(Item.GetName())
+ ListObject.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
+ ListObject.SetFeatureFlag(Item.GetFeatureFlagExp())
+ HelpString = GuidComentItem.GetHelpStringItem()
+ if HelpString.strip():
+ HelpTxtTailObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTxtTailObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTxtTailObj.SetString(HelpString)
+ ListObject.SetHelpTextList([HelpTxtTailObj])
+
+ GuidProtocolPpiList.append(ListObject)
+ elif Type == DT.TAB_PROTOCOLS:
+ ProtocolData = ProtocolObj.keys()
+ for Item in ProtocolData:
+ CommentList = Item.GetCommentList()
+ for CommentItem in CommentList:
+ ListObject = CommonObject.ProtocolObject()
+ ListObject.SetCName(Item.GetName())
+ ListObject.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
+ ListObject.SetFeatureFlag(Item.GetFeatureFlagExp())
+ ListObject.SetNotify(CommentItem.GetNotify())
+ ListObject.SetUsage(CommentItem.GetUsageItem())
+ HelpString = CommentItem.GetHelpStringItem()
+ if HelpString.strip():
+ HelpTxtObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTxtObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTxtObj.SetString(HelpString)
+ ListObject.SetHelpTextList([HelpTxtObj])
+ GuidProtocolPpiList.append(ListObject)
+ elif Type == DT.TAB_PPIS:
+ PpiData = PpisObj.keys()
+ for Item in PpiData:
+ CommentList = Item.GetCommentList()
+ for CommentItem in CommentList:
+ ListObject = CommonObject.PpiObject()
+ ListObject.SetCName(Item.GetName())
+ ListObject.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
+ ListObject.SetFeatureFlag(Item.GetFeatureFlagExp())
+ ListObject.SetNotify(CommentItem.GetNotify())
+ ListObject.SetUsage(CommentItem.GetUsage())
+ HelpString = CommentItem.GetHelpStringItem()
+ if HelpString.strip():
+ HelpTextObj = CommonObject.TextObject()
+ if self.UniFileClassObject:
+ HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
+ HelpTextObj.SetString(HelpString)
+ ListObject.SetHelpTextList([HelpTextObj])
+ GuidProtocolPpiList.append(ListObject)
+
+ if Type == DT.TAB_GUIDS:
+ self.SetGuidList(self.GetGuidList() + GuidProtocolPpiList)
+ elif Type == DT.TAB_PROTOCOLS:
+ self.SetProtocolList(self.GetProtocolList() + GuidProtocolPpiList)
+ elif Type == DT.TAB_PPIS:
+ self.SetPpiList(self.GetPpiList() + GuidProtocolPpiList)
+
+ ## GenMiscFiles
+ #
+    # Gen MiscellaneousFiles of INF
+    #
+    # @param Content: the content of the miscellaneous-files section, one file name per line
+ #
+ def _GenMiscFiles(self, Content):
+ MiscFileObj = CommonObject.MiscFileObject()
+ for Line in Content.splitlines():
+ FileName = ''
+ if '#' in Line:
+ FileName = Line[:Line.find('#')]
+ else:
+ FileName = Line
+ if FileName:
+ if IsValidPath(FileName, GlobalData.gINF_MODULE_DIR):
+ FileObj = CommonObject.FileObject()
+ FileObj.SetURI(FileName)
+ MiscFileObj.SetFileList(MiscFileObj.GetFileList()+[FileObj])
+ else:
+ Logger.Error("InfParser",
+ FORMAT_INVALID,
+ ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Line),
+ File=GlobalData.gINF_MODULE_NAME,
+ ExtraData=Line)
+ self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
+
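
A minimal, self-contained sketch of the comment-stripping parse in _GenMiscFiles above (the section content is hypothetical; the real code also validates each name with IsValidPath):

Content = "ReadMe.txt  # top-level readme\n\nLicense.txt"
for Line in Content.splitlines():
    FileName = Line[:Line.find('#')] if '#' in Line else Line
    if FileName.strip():
        print(FileName.strip())   # prints ReadMe.txt, then License.txt
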
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
new file mode 100755
index 00000000..c422de26
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
@@ -0,0 +1,255 @@
+## @file InfPomAlignmentMisc.py
+# This file contains helper routines for InfPomAlignment
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+InfPomAlignmentMisc
+'''
+
+##
+# Import modules
+#
+import Logger.Log as Logger
+from Library import DataType as DT
+from Library.Misc import ConvertArchList
+from Object.POM.ModuleObject import BinaryFileObject
+from Object.POM import CommonObject
+from Library.StringUtils import FORMAT_INVALID
+from Library.Misc import CheckGuidRegFormat
+from Logger import StringTable as ST
+
+
+## GenModuleHeaderUserExt
+#
+# Generate the dictionary of module header define statements for the user extension section.
+#
+def GenModuleHeaderUserExt(DefineObj, ArchString):
+ DefinesDictNew = {}
+ EdkReleaseVersion = DefineObj.GetEdkReleaseVersion()
+ Shadow = DefineObj.GetShadow()
+ DpxSource = DefineObj.GetDpxSource()
+ PciVendorId = DefineObj.GetPciVendorId()
+ PciDeviceId = DefineObj.GetPciDeviceId()
+ PciClassCode = DefineObj.GetPciClassCode()
+ PciRevision = DefineObj.GetPciRevision()
+ PciCompress = DefineObj.GetPciCompress()
+ CustomMakefile = DefineObj.GetCustomMakefile()
+ UefiHiiResourceSection = DefineObj.GetUefiHiiResourceSection()
+
+ if EdkReleaseVersion is not None:
+ Name = DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION
+ Value = EdkReleaseVersion.GetValue()
+ Statement = _GenInfDefineStateMent(EdkReleaseVersion.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ EdkReleaseVersion.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if Shadow is not None:
+ Name = DT.TAB_INF_DEFINES_SHADOW
+ Value = Shadow.GetValue()
+ Statement = _GenInfDefineStateMent(Shadow.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ Shadow.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if DpxSource is not None:
+ Name = DT.TAB_INF_DEFINES_DPX_SOURCE
+ for DpxSourceItem in DpxSource:
+ Value = DpxSourceItem[0]
+ Statement = _GenInfDefineStateMent(DpxSourceItem[1].GetHeaderComments(),
+ Name,
+ Value,
+ DpxSourceItem[1].GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if PciVendorId is not None:
+ Name = DT.TAB_INF_DEFINES_PCI_VENDOR_ID
+ Value = PciVendorId.GetValue()
+ Statement = _GenInfDefineStateMent(PciVendorId.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ PciVendorId.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if PciDeviceId is not None:
+ Name = DT.TAB_INF_DEFINES_PCI_DEVICE_ID
+ Value = PciDeviceId.GetValue()
+ Statement = _GenInfDefineStateMent(PciDeviceId.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ PciDeviceId.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if PciClassCode is not None:
+ Name = DT.TAB_INF_DEFINES_PCI_CLASS_CODE
+ Value = PciClassCode.GetValue()
+ Statement = _GenInfDefineStateMent(PciClassCode.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ PciClassCode.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if PciRevision is not None:
+ Name = DT.TAB_INF_DEFINES_PCI_REVISION
+ Value = PciRevision.GetValue()
+ Statement = _GenInfDefineStateMent(PciRevision.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ PciRevision.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if PciCompress is not None:
+ Name = DT.TAB_INF_DEFINES_PCI_COMPRESS
+ Value = PciCompress.GetValue()
+ Statement = _GenInfDefineStateMent(PciCompress.Comments.GetHeaderComments(),
+ Name,
+ Value,
+ PciCompress.Comments.GetTailComments())
+ DefinesDictNew[Statement] = ArchString
+
+ if len(CustomMakefile) >= 1:
+ for CustomMakefileItem in CustomMakefile:
+ Name = DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE
+ #
+ # Not with Feature Flag Expression
+ #
+ if len(CustomMakefileItem) == 3:
+ if CustomMakefileItem[0] != '':
+ Value = CustomMakefileItem[0] + ' | ' + CustomMakefileItem[1]
+ else:
+ Value = CustomMakefileItem[1]
+
+ Comments = CustomMakefileItem[2]
+ Statement = _GenInfDefineStateMent(Comments.GetHeaderComments(),
+ Name,
+ Value,
+ Comments.GetTailComments())
+
+ DefinesDictNew[Statement] = ArchString
+
+ if UefiHiiResourceSection is not None:
+ Name = DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION
+ Value = UefiHiiResourceSection.GetValue()
+ HeaderComment = UefiHiiResourceSection.Comments.GetHeaderComments()
+ TailComment = UefiHiiResourceSection.Comments.GetTailComments()
+ Statement = _GenInfDefineStateMent(HeaderComment,
+ Name,
+ Value,
+ TailComment)
+ DefinesDictNew[Statement] = ""
+
+ return DefinesDictNew
+
+
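For orientation, GenModuleHeaderUserExt returns a dictionary that maps each regenerated define statement to the architecture string it applies to; a hypothetical result could look like:

DefinesDictNew = {
    'EDK_RELEASE_VERSION = 0x00020000': 'IA32 X64',
    'SHADOW = TRUE': 'IA32 X64',
    'UEFI_HII_RESOURCE_SECTION = TRUE': '',   # stored with an empty arch string, per the code above
}
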
+## Generate the define statement that will be put into userextension
+# Comments are not supported; they are only logged at debug level.
+#
+# @param HeaderComment: the original header comment (# not removed)
+# @param Name: the definition keyword, should not be empty or none
+# @param Value: the definition keyword value
+# @param TailComment: the original Tail comment (# not removed)
+#
+# @return: the regenerated define statement
+#
+def _GenInfDefineStateMent(HeaderComment, Name, Value, TailComment):
+ Logger.Debug(5, HeaderComment + TailComment)
+ Statement = '%s = %s' % (Name, Value)
+
+ return Statement
+
+## GenBinaryData
+#
+# Generate binary file objects and as-built information from the parsed [Binaries] section data.
+#
+def GenBinaryData(BinaryData, BinaryObj, BinariesDict, AsBuildIns, BinaryFileObjectList, \
+ SupArchList, BinaryModule, DecObjList=None):
+ if BinaryModule:
+ pass
+ OriSupArchList = SupArchList
+ for Item in BinaryData:
+ ItemObj = BinaryObj[Item][0][0]
+ if ItemObj.GetType() not in DT.BINARY_FILE_TYPE_UI_LIST + DT.BINARY_FILE_TYPE_VER_LIST:
+ TagName = ItemObj.GetTagName()
+ Family = ItemObj.GetFamily()
+ else:
+ TagName = ''
+ Family = ''
+
+ FFE = ItemObj.GetFeatureFlagExp()
+
+ #
+        # If an architecture is specified, use the specified architecture.
+        # If the section tag has no architecture modifier, or the modifier is "common" (case-insensitive),
+        # and a VALID_ARCHITECTURES comment exists, the list from that comment
+        # can be used for the attribute.
+        # If there is neither a VALID_ARCHITECTURES comment nor a specified architecture, keep it empty.
+ #
+ SupArchList = sorted(ConvertArchList(ItemObj.GetSupArchList()))
+ if len(SupArchList) == 1 and SupArchList[0] == 'COMMON':
+ if not (len(OriSupArchList) == 1 or OriSupArchList[0] == 'COMMON'):
+ SupArchList = OriSupArchList
+ else:
+ SupArchList = ['COMMON']
+
+ FileNameObj = CommonObject.FileNameObject()
+ FileNameObj.SetFileType(ItemObj.GetType())
+ FileNameObj.SetFilename(ItemObj.GetFileName())
+ FileNameObj.SetFeatureFlag(FFE)
+ #
+ # Get GUID value of the GUID CName in the DEC file
+ #
+ if ItemObj.GetType() == DT.SUBTYPE_GUID_BINARY_FILE_TYPE:
+ if not CheckGuidRegFormat(ItemObj.GetGuidValue()):
+ if not DecObjList:
+ if DT.TAB_HORIZON_LINE_SPLIT in ItemObj.GetGuidValue() or \
+ DT.TAB_COMMA_SPLIT in ItemObj.GetGuidValue():
+ Logger.Error("\nMkPkg",
+ FORMAT_INVALID,
+ ST.ERR_DECPARSE_DEFINE_PKGGUID,
+ ExtraData=ItemObj.GetGuidValue(),
+ RaiseError=True)
+ else:
+ Logger.Error("\nMkPkg",
+ FORMAT_INVALID,
+ ST.ERR_UNI_SUBGUID_VALUE_DEFINE_DEC_NOT_FOUND % \
+ (ItemObj.GetGuidValue()),
+ RaiseError=True)
+ else:
+ for DecObj in DecObjList:
+ for GuidObj in DecObj.GetGuidList():
+ if GuidObj.GetCName() == ItemObj.GetGuidValue():
+ FileNameObj.SetGuidValue(GuidObj.GetGuid())
+ break
+
+ if not FileNameObj.GetGuidValue():
+ Logger.Error("\nMkPkg",
+ FORMAT_INVALID,
+ ST.ERR_DECPARSE_CGUID_NOT_FOUND % \
+ (ItemObj.GetGuidValue()),
+ RaiseError=True)
+ else:
+ FileNameObj.SetGuidValue(ItemObj.GetGuidValue().strip())
+
+ FileNameObj.SetSupArchList(SupArchList)
+ FileNameList = [FileNameObj]
+
+ BinaryFile = BinaryFileObject()
+ BinaryFile.SetFileNameList(FileNameList)
+ BinaryFile.SetAsBuiltList(AsBuildIns)
+ BinaryFileObjectList.append(BinaryFile)
+
+ SupArchStr = ' '.join(SupArchList)
+ Key = (ItemObj.GetFileName(), ItemObj.GetType(), FFE, SupArchStr)
+ ValueItem = (ItemObj.GetTarget(), Family, TagName, '')
+ if Key in BinariesDict:
+ ValueList = BinariesDict[Key]
+ ValueList.append(ValueItem)
+ BinariesDict[Key] = ValueList
+ else:
+ BinariesDict[Key] = [ValueItem]
+
+ return BinariesDict, AsBuildIns, BinaryFileObjectList
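
A condensed sketch of the accumulation at the end of GenBinaryData: entries sharing the (FileName, Type, FeatureFlagExp, arch-string) key collect one (Target, Family, TagName, '') tuple per binary item (the values below are hypothetical):

BinariesDict = {}
Items = [(('Fv.bin', 'FV', '', 'IA32'), ('NOOPT', '', '', '')),
         (('Fv.bin', 'FV', '', 'IA32'), ('DEBUG', '', '', ''))]
for Key, ValueItem in Items:
    BinariesDict.setdefault(Key, []).append(ValueItem)
assert len(BinariesDict[('Fv.bin', 'FV', '', 'IA32')]) == 2
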
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/__init__.py
new file mode 100644
index 00000000..0df484ce
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/PomAdapter/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Parser' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+PomAdapter
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/ReplacePkg.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/ReplacePkg.py
new file mode 100755
index 00000000..4152b25f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/ReplacePkg.py
@@ -0,0 +1,142 @@
+## @file
+# Replace distribution package.
+#
+# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+"""
+Replace a distribution package
+"""
+##
+# Import Modules
+#
+from shutil import rmtree
+from traceback import format_exc
+from platform import python_version
+from sys import platform
+from Logger import StringTable as ST
+from Logger.ToolError import UNKNOWN_ERROR
+from Logger.ToolError import FatalError
+from Logger.ToolError import ABORT_ERROR
+from Logger.ToolError import CODE_ERROR
+from Logger.ToolError import UPT_ALREADY_INSTALLED_ERROR
+import Logger.Log as Logger
+
+from Core.DependencyRules import DependencyRules
+from Library import GlobalData
+from InstallPkg import UnZipDp
+from InstallPkg import InstallDp
+from RmPkg import GetInstalledDpInfo
+from RmPkg import RemoveDist
+
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods per the command-line options.
+# If no error is found, a zero value is returned so the caller of this tool
+# can know whether it executed successfully or not.
+#
+# @param Options: command Options
+#
+def Main(Options = None):
+ ContentZipFile, DistFile = None, None
+ try:
+ DataBase = GlobalData.gDB
+ WorkspaceDir = GlobalData.gWORKSPACE
+ Dep = DependencyRules(DataBase)
+ DistPkg, ContentZipFile, DpPkgFileName, DistFile = UnZipDp(WorkspaceDir, Options.PackFileToReplace)
+
+ StoredDistFile, OrigDpGuid, OrigDpVersion = GetInstalledDpInfo(Options.PackFileToBeReplaced, \
+ Dep, DataBase, WorkspaceDir)
+
+ #
+ # check dependency
+ #
+ CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion)
+
+ #
+ # Remove the old distribution
+ #
+ RemoveDist(OrigDpGuid, OrigDpVersion, StoredDistFile, DataBase, WorkspaceDir, Options.Yes)
+
+ #
+ # Install the new distribution
+ #
+ InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir, DataBase)
+ ReturnCode = 0
+
+ except FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
+ platform) + format_exc())
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
+ platform) + format_exc())
+ except:
+ ReturnCode = CODE_ERROR
+ Logger.Error(
+ "\nReplacePkg",
+ CODE_ERROR,
+ ST.ERR_UNKNOWN_FATAL_REPLACE_ERR % (Options.PackFileToReplace, Options.PackFileToBeReplaced),
+ ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
+ platform) + format_exc())
+
+ finally:
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_STARTED)
+ if DistFile:
+ DistFile.Close()
+ if ContentZipFile:
+ ContentZipFile.Close()
+ for TempDir in GlobalData.gUNPACK_DIR:
+ rmtree(TempDir)
+ GlobalData.gUNPACK_DIR = []
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
+
+ if ReturnCode == 0:
+ Logger.Quiet(ST.MSG_FINISH)
+
+ return ReturnCode
+
+def CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion):
+ NewDpPkgList = []
+ for PkgInfo in DistPkg.PackageSurfaceArea:
+ Guid, Version = PkgInfo[0], PkgInfo[1]
+ NewDpPkgList.append((Guid, Version))
+
+ NewDpInfo = "%s %s" % (DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion())
+ OrigDpInfo = "%s %s" % (OrigDpGuid, OrigDpVersion)
+
+ #
+    # if the new distribution is not simply replacing itself, check that it is not already installed
+ #
+ if (NewDpInfo != OrigDpInfo):
+ if Dep.CheckDpExists(DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion()):
+ Logger.Error("\nReplacePkg", UPT_ALREADY_INSTALLED_ERROR,
+ ST.WRN_DIST_PKG_INSTALLED,
+ ExtraData=ST.MSG_REPLACE_ALREADY_INSTALLED_DP)
+
+ #
+    # check whether the original distribution can be replaced by the new distribution
+ #
+ Logger.Verbose(ST.MSG_CHECK_DP_FOR_REPLACE%(NewDpInfo, OrigDpInfo))
+ DepInfoResult = Dep.CheckDpDepexForReplace(OrigDpGuid, OrigDpVersion, NewDpPkgList)
+ Replaceable = DepInfoResult[0]
+ if not Replaceable:
+ Logger.Error("\nReplacePkg", UNKNOWN_ERROR,
+ ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY)
+
+ #
+    # check whether the new distribution can be installed per the dependency rules
+ #
+ Logger.Verbose(ST.MSG_CHECK_DP_FOR_INSTALL%str(NewDpInfo))
+ if not Dep.ReplaceCheckNewDpDepex(DistPkg, OrigDpGuid, OrigDpVersion):
+ Logger.Error("\nReplacePkg", UNKNOWN_ERROR,
+ ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY,
+ ExtraData=DistPkg.Header.Name)
+
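
A condensed sketch of the three ordered checks performed by CheckReplaceDpx, assuming a Dep object with the same method names; this is a reading aid rather than a drop-in replacement, since the real code raises through Logger.Error instead of returning a flag:

def CanReplace(Dep, DistPkg, OrigDpGuid, OrigDpVersion, NewDpPkgList):
    NewDpInfo = "%s %s" % (DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion())
    OrigDpInfo = "%s %s" % (OrigDpGuid, OrigDpVersion)
    if NewDpInfo != OrigDpInfo and \
       Dep.CheckDpExists(DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion()):
        return False  # already installed, and not simply replacing itself
    if not Dep.CheckDpDepexForReplace(OrigDpGuid, OrigDpVersion, NewDpPkgList)[0]:
        return False  # installed content still depends on the original
    return Dep.ReplaceCheckNewDpDepex(DistPkg, OrigDpGuid, OrigDpVersion)
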
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/RmPkg.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/RmPkg.py
new file mode 100755
index 00000000..98e0fcd6
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/RmPkg.py
@@ -0,0 +1,270 @@
+## @file
+# Remove distribution package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+RmPkg
+'''
+
+##
+# Import Modules
+#
+import os.path
+from stat import S_IWUSR
+from traceback import format_exc
+from platform import python_version
+from hashlib import md5
+from sys import stdin
+from sys import platform
+
+from Core.DependencyRules import DependencyRules
+from Library import GlobalData
+from Logger import StringTable as ST
+import Logger.Log as Logger
+from Logger.ToolError import OPTION_MISSING
+from Logger.ToolError import UNKNOWN_ERROR
+from Logger.ToolError import ABORT_ERROR
+from Logger.ToolError import CODE_ERROR
+from Logger.ToolError import FatalError
+
+
+## CheckDpDepex
+#
+# Check whether dependency constraints allow the distribution to be removed
+# @param Dep: Dep
+# @param Guid: Guid of Dp
+# @param Version: Version of Dp
+# @param WorkspaceDir: Workspace Dir
+#
+def CheckDpDepex(Dep, Guid, Version, WorkspaceDir):
+ (Removable, DependModuleList) = Dep.CheckDpDepexForRemove(Guid, Version)
+ if not Removable:
+ Logger.Info(ST.MSG_CONFIRM_REMOVE)
+ Logger.Info(ST.MSG_USER_DELETE_OP)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input.upper() != 'Y':
+ Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
+ return 1
+ else:
+ #
+        # report the list of modules that become invalid because of the
+        # forced removal, and also generate a log file for reference
+ #
+ Logger.Info(ST.MSG_INVALID_MODULE_INTRODUCED)
+ LogFilePath = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gINVALID_MODULE_FILE))
+ Logger.Info(ST.MSG_CHECK_LOG_FILE % LogFilePath)
+        LogFile = None
+        try:
+            LogFile = open(LogFilePath, 'w')
+            try:
+                for ModulePath in DependModuleList:
+                    LogFile.write("%s\n" % ModulePath)
+                    Logger.Info(ModulePath)
+            except IOError:
+                Logger.Warn("\nRmPkg", ST.ERR_FILE_WRITE_FAILURE,
+                            File=LogFilePath)
+        except IOError:
+            Logger.Warn("\nRmPkg", ST.ERR_FILE_OPEN_FAILURE,
+                        File=LogFilePath)
+        finally:
+            # guard against the open() itself failing, which would leave
+            # LogFile unbound and turn the close() into a NameError
+            if LogFile:
+                LogFile.close()
+
+## Remove Path
+#
+# Removing a read-only file on Windows fails with an "Access is denied"
+# error, so change the mode to writable before removing.
+#
+# @param Path: The Path to be removed
+#
+def RemovePath(Path):
+ Logger.Info(ST.MSG_REMOVE_FILE % Path)
+ if not os.access(Path, os.W_OK):
+ os.chmod(Path, S_IWUSR)
+ os.remove(Path)
+ try:
+ os.removedirs(os.path.split(Path)[0])
+ except OSError:
+ pass
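
A self-contained demonstration of the RemovePath pattern above, using throwaway temporary files; the chmod call is what avoids the Windows "Access is denied" failure:

import os, stat, tempfile

Base = tempfile.mkdtemp()
os.makedirs(os.path.join(Base, 'a', 'b'))
Path = os.path.join(Base, 'a', 'b', 'f.txt')
open(Path, 'w').close()
os.chmod(Path, stat.S_IREAD)              # simulate a read-only file

if not os.access(Path, os.W_OK):
    os.chmod(Path, stat.S_IWUSR)          # make it writable before deleting
os.remove(Path)
try:
    os.removedirs(os.path.dirname(Path))  # prunes 'b', then 'a', while they are empty
except OSError:
    pass                                  # stops at the first non-empty parent
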
+## GetCurrentFileList
+#
+# Get the list of files currently present under the distribution's install directories
+#
+# @param DataBase: DataBase of UPT
+# @param Guid: Guid of Dp
+# @param Version: Version of Dp
+# @param WorkspaceDir: Workspace Dir
+#
+def GetCurrentFileList(DataBase, Guid, Version, WorkspaceDir):
+ NewFileList = []
+ for Dir in DataBase.GetDpInstallDirList(Guid, Version):
+ RootDir = os.path.normpath(os.path.join(WorkspaceDir, Dir))
+ for Root, Dirs, Files in os.walk(RootDir):
+ Logger.Debug(0, Dirs)
+ for File in Files:
+ FilePath = os.path.join(Root, File)
+ if FilePath not in NewFileList:
+ NewFileList.append(FilePath)
+ return NewFileList
+
+
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods per the command-line options.
+# If no error is found, a zero value is returned so the caller of this tool
+# can know whether it executed successfully or not.
+#
+# @param Options: command option
+#
+def Main(Options = None):
+
+ try:
+ DataBase = GlobalData.gDB
+ if not Options.DistributionFile:
+ Logger.Error("RmPkg",
+ OPTION_MISSING,
+ ExtraData=ST.ERR_SPECIFY_PACKAGE)
+ WorkspaceDir = GlobalData.gWORKSPACE
+ #
+ # Prepare check dependency
+ #
+ Dep = DependencyRules(DataBase)
+
+ #
+ # Get the Dp information
+ #
+ StoredDistFile, Guid, Version = GetInstalledDpInfo(Options.DistributionFile, Dep, DataBase, WorkspaceDir)
+
+ #
+ # Check Dp depex
+ #
+ CheckDpDepex(Dep, Guid, Version, WorkspaceDir)
+
+ #
+ # remove distribution
+ #
+ RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, Options.Yes)
+
+ Logger.Quiet(ST.MSG_FINISH)
+
+ ReturnCode = 0
+
+ except FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
+ format_exc())
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
+ format_exc())
+ except:
+ Logger.Error(
+ "\nRmPkg",
+ CODE_ERROR,
+ ST.ERR_UNKNOWN_FATAL_REMOVING_ERR,
+ ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
+ format_exc())
+ ReturnCode = CODE_ERROR
+ return ReturnCode
+
+## GetInstalledDpInfo method
+#
+# Get the installed distribution information
+#
+# @param DistributionFile: the name of the distribution
+# @param Dep: the instance of DependencyRules
+# @param DataBase: the internal database
+# @param WorkspaceDir: work space directory
+# @retval StoredDistFile: the distribution file that backed up
+# @retval Guid: the Guid of the distribution
+# @retval Version: the Version of distribution
+#
+def GetInstalledDpInfo(DistributionFile, Dep, DataBase, WorkspaceDir):
+ (Guid, Version, NewDpFileName) = DataBase.GetDpByName(os.path.split(DistributionFile)[1])
+ if not Guid:
+ Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_PACKAGE_NOT_INSTALLED % DistributionFile)
+
+ #
+ # Check Dp existing
+ #
+ if not Dep.CheckDpExists(Guid, Version):
+ Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_DISTRIBUTION_NOT_INSTALLED)
+ #
+    # Check that the distribution file exists in Conf/upt; if it does not,
+    # warn the user and go on.
+ #
+ StoredDistFile = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gUPT_DIR, NewDpFileName))
+ if not os.path.isfile(StoredDistFile):
+ Logger.Warn("RmPkg", ST.WRN_DIST_NOT_FOUND%StoredDistFile)
+ StoredDistFile = None
+
+ return StoredDistFile, Guid, Version
+
+## RemoveDist method
+#
+# remove a distribution
+#
+# @param Guid: the Guid of the distribution
+# @param Version: the Version of distribution
+# @param StoredDistFile: the distribution file that backed up
+# @param DataBase: the internal database
+# @param WorkspaceDir: work space directory
+# @param ForceRemove: whether user want to remove file even it is modified
+#
+def RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, ForceRemove):
+ #
+ # Get Current File List
+ #
+ NewFileList = GetCurrentFileList(DataBase, Guid, Version, WorkspaceDir)
+
+ #
+ # Remove all files
+ #
+ MissingFileList = []
+ for (Path, Md5Sum) in DataBase.GetDpFileList(Guid, Version):
+ if os.path.isfile(Path):
+ if Path in NewFileList:
+ NewFileList.remove(Path)
+ if not ForceRemove:
+ #
+ # check whether modified by users
+ #
+ Md5Signature = md5(open(str(Path), 'rb').read())
+ if Md5Sum != Md5Signature.hexdigest():
+ Logger.Info(ST.MSG_CONFIRM_REMOVE2 % Path)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input.upper() != 'Y':
+ continue
+ RemovePath(Path)
+ else:
+ MissingFileList.append(Path)
+
+ for Path in NewFileList:
+ if os.path.isfile(Path):
+ if (not ForceRemove) and (not os.path.split(Path)[1].startswith('.')):
+ Logger.Info(ST.MSG_CONFIRM_REMOVE3 % Path)
+ Input = stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input.upper() != 'Y':
+ continue
+ RemovePath(Path)
+
+ #
+ # Remove distribution files in /Conf/.upt
+ #
+ if StoredDistFile is not None:
+ os.remove(StoredDistFile)
+
+ #
+ # update database
+ #
+ Logger.Quiet(ST.MSG_UPDATE_PACKAGE_DATABASE)
+ DataBase.RemoveDpObj(Guid, Version)
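
A minimal sketch of the modification check used in RemoveDist above: the Md5Sum stored in the database is compared with a fresh digest of the on-disk file, and a mismatch is what triggers the per-file confirmation prompt:

from hashlib import md5

def IsModified(Path, StoredMd5Sum):
    with open(Path, 'rb') as File:
        return md5(File.read()).hexdigest() != StoredMd5Sum
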
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/TestInstall.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/TestInstall.py
new file mode 100755
index 00000000..b95449e5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/TestInstall.py
@@ -0,0 +1,94 @@
+## @file
+# Test Install distribution package
+#
+# Copyright (c) 2016 - 2017, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+"""
+Test Install multiple distribution package
+"""
+##
+# Import Modules
+#
+from Library import GlobalData
+import Logger.Log as Logger
+from Logger import StringTable as ST
+import Logger.ToolError as TE
+from Core.DependencyRules import DependencyRules
+from InstallPkg import UnZipDp
+
+import shutil
+from traceback import format_exc
+from platform import python_version
+from sys import platform
+
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods per the command-line options.
+# If no error is found, a zero value is returned so the caller of this tool
+# can know whether it executed successfully or not.
+#
+# @param Options: command Options
+#
+def Main(Options=None):
+ ContentZipFile, DistFile = None, None
+ ReturnCode = 0
+
+ try:
+ DataBase = GlobalData.gDB
+ WorkspaceDir = GlobalData.gWORKSPACE
+ if not Options.DistFiles:
+ Logger.Error("TestInstallPkg", TE.OPTION_MISSING, ExtraData=ST.ERR_SPECIFY_PACKAGE)
+
+ DistPkgList = []
+ for DistFile in Options.DistFiles:
+ DistPkg, ContentZipFile, __, DistFile = UnZipDp(WorkspaceDir, DistFile)
+ DistPkgList.append(DistPkg)
+
+ #
+ # check dependency
+ #
+ Dep = DependencyRules(DataBase)
+ Result = True
+ DpObj = None
+ try:
+ Result, DpObj = Dep.CheckTestInstallPdDepexSatisfied(DistPkgList)
+ except:
+ Result = False
+
+ if Result:
+ Logger.Quiet(ST.MSG_TEST_INSTALL_PASS)
+ else:
+ Logger.Quiet(ST.MSG_TEST_INSTALL_FAIL)
+
+ except TE.FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+
+    except Exception:
+ ReturnCode = TE.CODE_ERROR
+ Logger.Error(
+ "\nTestInstallPkg",
+ TE.CODE_ERROR,
+ ST.ERR_UNKNOWN_FATAL_INSTALL_ERR % Options.DistFiles,
+ ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+
+ finally:
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_STARTED)
+ if DistFile:
+ DistFile.Close()
+ if ContentZipFile:
+ ContentZipFile.Close()
+ for TempDir in GlobalData.gUNPACK_DIR:
+ shutil.rmtree(TempDir)
+ GlobalData.gUNPACK_DIR = []
+ Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
+ if ReturnCode == 0:
+ Logger.Quiet(ST.MSG_FINISH)
+ return ReturnCode
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UPT.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UPT.py
new file mode 100755
index 00000000..693e01e0
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UPT.py
@@ -0,0 +1,347 @@
+## @file
+#
+# This file is the main entry for UPT
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+UPT
+'''
+
+## import modules
+#
+import locale
+import sys
+from imp import reload
+encoding = locale.getdefaultlocale()[1]
+if encoding:
+ reload(sys)
+ sys.setdefaultencoding(encoding)
+from Core import FileHook
+import os.path
+from sys import platform
+import platform as pf
+from optparse import OptionParser
+from traceback import format_exc
+from platform import python_version
+
+from Logger import StringTable as ST
+import Logger.Log as Logger
+from Logger.StringTable import MSG_VERSION
+from Logger.StringTable import MSG_DESCRIPTION
+from Logger.StringTable import MSG_USAGE
+from Logger.ToolError import FILE_NOT_FOUND
+from Logger.ToolError import OPTION_MISSING
+from Logger.ToolError import FILE_TYPE_MISMATCH
+from Logger.ToolError import OPTION_CONFLICT
+from Logger.ToolError import FatalError
+from Logger.ToolError import UPT_ALREADY_INSTALLED_ERROR
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+
+import MkPkg
+import InstallPkg
+import RmPkg
+import InventoryWs
+import ReplacePkg
+import TestInstall
+from Library.Misc import GetWorkspace
+from Library import GlobalData
+from Core.IpiDb import IpiDatabase
+from BuildVersion import gBUILD_VERSION
+
+## CheckConflictOption
+#
+# Check mutually exclusive command-line options and report conflicts.
+#
+def CheckConflictOption(Opt):
+ if (Opt.PackFileToCreate or Opt.PackFileToInstall or Opt.PackFileToRemove or Opt.PackFileToReplace) \
+ and Opt.InventoryWs:
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_L_OA_EXCLUSIVE)
+ elif Opt.PackFileToReplace and (Opt.PackFileToCreate or Opt.PackFileToInstall or Opt.PackFileToRemove):
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_U_ICR_EXCLUSIVE)
+ elif (Opt.PackFileToCreate and Opt.PackFileToInstall and Opt.PackFileToRemove):
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_REQUIRE_I_C_R_OPTION)
+ elif Opt.PackFileToCreate and Opt.PackFileToInstall:
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_I_C_EXCLUSIVE)
+ elif Opt.PackFileToInstall and Opt.PackFileToRemove:
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_I_R_EXCLUSIVE)
+ elif Opt.PackFileToCreate and Opt.PackFileToRemove:
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_C_R_EXCLUSIVE)
+ elif Opt.TestDistFiles and (Opt.PackFileToCreate or Opt.PackFileToInstall \
+ or Opt.PackFileToRemove or Opt.PackFileToReplace):
+ Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_C_R_EXCLUSIVE)
+
+ if Opt.CustomPath and Opt.UseGuidedPkgPath:
+ Logger.Warn("UPT", ST.WARN_CUSTOMPATH_OVERRIDE_USEGUIDEDPATH)
+ Opt.UseGuidedPkgPath = False
+
+## SetLogLevel
+#
+def SetLogLevel(Opt):
+ if Opt.opt_verbose:
+ Logger.SetLevel(Logger.VERBOSE)
+ elif Opt.opt_quiet:
+ Logger.SetLevel(Logger.QUIET + 1)
+ elif Opt.debug_level is not None:
+ if Opt.debug_level < 0 or Opt.debug_level > 9:
+ Logger.Warn("UPT", ST.ERR_DEBUG_LEVEL)
+ Logger.SetLevel(Logger.INFO)
+ else:
+ Logger.SetLevel(Opt.debug_level + 1)
+ elif Opt.opt_slient:
+ Logger.SetLevel(Logger.SILENT)
+ else:
+ Logger.SetLevel(Logger.INFO)
+
+## Main
+#
+# Main entry point of UPT.
+#
+def Main():
+ Logger.Initialize()
+
+ Parser = OptionParser(version=(MSG_VERSION + ' Build ' + gBUILD_VERSION), description=MSG_DESCRIPTION,
+ prog="UPT.exe", usage=MSG_USAGE)
+
+ Parser.add_option("-d", "--debug", action="store", type="int", dest="debug_level", help=ST.HLP_PRINT_DEBUG_INFO)
+
+ Parser.add_option("-v", "--verbose", action="store_true", dest="opt_verbose",
+ help=ST.HLP_PRINT_INFORMATIONAL_STATEMENT)
+
+ Parser.add_option("-s", "--silent", action="store_true", dest="opt_slient", help=ST.HLP_RETURN_NO_DISPLAY)
+
+ Parser.add_option("-q", "--quiet", action="store_true", dest="opt_quiet", help=ST.HLP_RETURN_AND_DISPLAY)
+
+ Parser.add_option("-i", "--install", action="append", type="string", dest="Install_Distribution_Package_File",
+ help=ST.HLP_SPECIFY_PACKAGE_NAME_INSTALL)
+
+ Parser.add_option("-c", "--create", action="store", type="string", dest="Create_Distribution_Package_File",
+ help=ST.HLP_SPECIFY_PACKAGE_NAME_CREATE)
+
+ Parser.add_option("-r", "--remove", action="store", type="string", dest="Remove_Distribution_Package_File",
+ help=ST.HLP_SPECIFY_PACKAGE_NAME_REMOVE)
+
+ Parser.add_option("-t", "--template", action="store", type="string", dest="Package_Information_Data_File",
+ help=ST.HLP_SPECIFY_TEMPLATE_NAME_CREATE)
+
+ Parser.add_option("-p", "--dec-filename", action="append", type="string", dest="EDK2_DEC_Filename",
+ help=ST.HLP_SPECIFY_DEC_NAME_CREATE)
+
+ Parser.add_option("-m", "--inf-filename", action="append", type="string", dest="EDK2_INF_Filename",
+ help=ST.HLP_SPECIFY_INF_NAME_CREATE)
+
+ Parser.add_option("-l", "--list", action="store_true", dest="List_Dist_Installed",
+ help=ST.HLP_LIST_DIST_INSTALLED)
+
+ Parser.add_option("-f", "--force", action="store_true", dest="Yes", help=ST.HLP_DISABLE_PROMPT)
+
+ Parser.add_option("-n", "--custom-path", action="store_true", dest="CustomPath", help=ST.HLP_CUSTOM_PATH_PROMPT)
+
+ Parser.add_option("-x", "--free-lock", action="store_true", dest="SkipLock", help=ST.HLP_SKIP_LOCK_CHECK)
+
+ Parser.add_option("-u", "--replace", action="store", type="string", dest="Replace_Distribution_Package_File",
+ help=ST.HLP_SPECIFY_PACKAGE_NAME_REPLACE)
+
+ Parser.add_option("-o", "--original", action="store", type="string", dest="Original_Distribution_Package_File",
+ help=ST.HLP_SPECIFY_PACKAGE_NAME_TO_BE_REPLACED)
+
+ Parser.add_option("--use-guided-paths", action="store_true", dest="Use_Guided_Paths", help=ST.HLP_USE_GUIDED_PATHS)
+
+ Parser.add_option("-j", "--test-install", action="append", type="string",
+ dest="Test_Install_Distribution_Package_Files", help=ST.HLP_TEST_INSTALL)
+
+ Opt = Parser.parse_args()[0]
+
+ Var2Var = [
+ ("PackageInformationDataFile", Opt.Package_Information_Data_File),
+ ("PackFileToInstall", Opt.Install_Distribution_Package_File),
+ ("PackFileToCreate", Opt.Create_Distribution_Package_File),
+ ("PackFileToRemove", Opt.Remove_Distribution_Package_File),
+ ("PackageFileList", Opt.EDK2_DEC_Filename),
+ ("ModuleFileList", Opt.EDK2_INF_Filename),
+ ("InventoryWs", Opt.List_Dist_Installed),
+ ("PackFileToReplace", Opt.Replace_Distribution_Package_File),
+ ("PackFileToBeReplaced", Opt.Original_Distribution_Package_File),
+ ("UseGuidedPkgPath", Opt.Use_Guided_Paths),
+ ("TestDistFiles", Opt.Test_Install_Distribution_Package_Files)
+ ]
+
+ for Var in Var2Var:
+ setattr(Opt, Var[0], Var[1])
+
+ try:
+ GlobalData.gWORKSPACE, GlobalData.gPACKAGE_PATH = GetWorkspace()
+ except FatalError as XExcept:
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
+ return XExcept.args[0]
+
+    # Support a WORKSPACE that is a long path.
+    # Only works on Windows systems.
+ if pf.system() == 'Windows':
+ Vol = 'B:'
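+        # Walk drive letters from 'Z:' (chr(90)) down to 'B:' (chr(66)) and subst the workspace onto the first free one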
+ for Index in range(90, 65, -1):
+ Vol = chr(Index) + ':'
+ if not os.path.isdir(Vol):
+ os.system('subst %s "%s"' % (Vol, GlobalData.gWORKSPACE))
+ break
+ GlobalData.gWORKSPACE = '%s\\' % Vol
+
+ WorkspaceDir = GlobalData.gWORKSPACE
+
+ SetLogLevel(Opt)
+
+ Mgr = FileHook.RecoverMgr(WorkspaceDir)
+ FileHook.SetRecoverMgr(Mgr)
+
+ GlobalData.gDB = IpiDatabase(os.path.normpath(os.path.join(WorkspaceDir, \
+ "Conf/DistributionPackageDatabase.db")), WorkspaceDir)
+ GlobalData.gDB.InitDatabase(Opt.SkipLock)
+
+ #
+ # Make sure the Db will get closed correctly
+ #
+ try:
+ ReturnCode = 0
+ CheckConflictOption(Opt)
+
+ RunModule = None
+ if Opt.PackFileToCreate:
+ if Opt.PackageInformationDataFile:
+ if not os.path.exists(Opt.PackageInformationDataFile):
+ if not os.path.exists(os.path.join(WorkspaceDir, Opt.PackageInformationDataFile)):
+ Logger.Error("\nUPT", FILE_NOT_FOUND, ST.ERR_NO_TEMPLATE_FILE % Opt.PackageInformationDataFile)
+ else:
+ Opt.PackageInformationDataFile = os.path.join(WorkspaceDir, Opt.PackageInformationDataFile)
+ else:
+ Logger.Error("UPT", OPTION_MISSING, ExtraData=ST.ERR_REQUIRE_T_OPTION)
+ if not Opt.PackFileToCreate.endswith('.dist'):
+ Logger.Error("CreatePkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToCreate)
+ RunModule = MkPkg.Main
+
+ elif Opt.PackFileToInstall:
+ AbsPath = []
+ for Item in Opt.PackFileToInstall:
+ if not Item.endswith('.dist'):
+ Logger.Error("InstallPkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Item)
+
+ AbsPath.append(GetFullPathDist(Item, WorkspaceDir))
+ if not AbsPath:
+ Logger.Error("InstallPkg", FILE_NOT_FOUND, ST.ERR_INSTALL_DIST_NOT_FOUND % Item)
+
+ Opt.PackFileToInstall = AbsPath
+ setattr(Opt, 'PackageFile', Opt.PackFileToInstall)
+ RunModule = InstallPkg.Main
+
+ elif Opt.PackFileToRemove:
+ if not Opt.PackFileToRemove.endswith('.dist'):
+ Logger.Error("RemovePkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToRemove)
+ head, tail = os.path.split(Opt.PackFileToRemove)
+ if head or not tail:
+ Logger.Error("RemovePkg",
+ FILE_TYPE_MISMATCH,
+ ExtraData=ST.ERR_DIST_FILENAME_ONLY_FOR_REMOVE % Opt.PackFileToRemove)
+
+ setattr(Opt, 'DistributionFile', Opt.PackFileToRemove)
+ RunModule = RmPkg.Main
+ elif Opt.InventoryWs:
+ RunModule = InventoryWs.Main
+
+ elif Opt.PackFileToBeReplaced and not Opt.PackFileToReplace:
+ Logger.Error("ReplacePkg", OPTION_MISSING, ExtraData=ST.ERR_REQUIRE_U_OPTION)
+
+ elif Opt.PackFileToReplace:
+ if not Opt.PackFileToReplace.endswith('.dist'):
+ Logger.Error("ReplacePkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToReplace)
+ if not Opt.PackFileToBeReplaced:
+ Logger.Error("ReplacePkg", OPTION_MISSING, ExtraData=ST.ERR_REQUIRE_O_OPTION)
+ if not Opt.PackFileToBeReplaced.endswith('.dist'):
+ Logger.Error("ReplacePkg",
+ FILE_TYPE_MISMATCH,
+ ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToBeReplaced)
+
+ head, tail = os.path.split(Opt.PackFileToBeReplaced)
+ if head or not tail:
+ Logger.Error("ReplacePkg",
+ FILE_TYPE_MISMATCH,
+ ExtraData=ST.ERR_DIST_FILENAME_ONLY_FOR_REPLACE_ORIG % Opt.PackFileToBeReplaced)
+
+ AbsPath = GetFullPathDist(Opt.PackFileToReplace, WorkspaceDir)
+ if not AbsPath:
+ Logger.Error("ReplacePkg", FILE_NOT_FOUND, ST.ERR_REPLACE_DIST_NOT_FOUND % Opt.PackFileToReplace)
+
+ Opt.PackFileToReplace = AbsPath
+ RunModule = ReplacePkg.Main
+
+ elif Opt.Test_Install_Distribution_Package_Files:
+ for Dist in Opt.Test_Install_Distribution_Package_Files:
+ if not Dist.endswith('.dist'):
+ Logger.Error("TestInstall", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Dist)
+
+ setattr(Opt, 'DistFiles', Opt.Test_Install_Distribution_Package_Files)
+ RunModule = TestInstall.Main
+
+ else:
+ Parser.print_usage()
+ return OPTION_MISSING
+
+ ReturnCode = RunModule(Opt)
+ except FatalError as XExcept:
+ ReturnCode = XExcept.args[0]
+ if Logger.GetLevel() <= Logger.DEBUG_9:
+ Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
+ format_exc())
+ finally:
+ try:
+ if ReturnCode != 0 and ReturnCode != UPT_ALREADY_INSTALLED_ERROR:
+ Logger.Quiet(ST.MSG_RECOVER_START)
+ GlobalData.gDB.RollBack()
+ Mgr.rollback()
+ Logger.Quiet(ST.MSG_RECOVER_DONE)
+ else:
+ GlobalData.gDB.Commit()
+ Mgr.commit()
+ except Exception:
+ Logger.Quiet(ST.MSG_RECOVER_FAIL)
+ GlobalData.gDB.CloseDb()
+
+ if pf.system() == 'Windows':
+ os.system('subst %s /D' % GlobalData.gWORKSPACE.replace('\\', ''))
+
+ return ReturnCode
+
+## GetFullPathDist
+#
+# This function checks that DistFile exists. If the path is not absolute, the current working
+# directory is tried first, then $(WORKSPACE), and the absolute path is returned. If the file
+# cannot be found, None is returned.
+#
+# @param DistFile: The distribution file, as either a relative or an absolute path
+# @param WorkspaceDir: Workspace Directory
+# @return AbsPath: The absolute path of the distribution file if it exists, None otherwise
+#
+def GetFullPathDist(DistFile, WorkspaceDir):
+ if os.path.isabs(DistFile):
+ if not (os.path.exists(DistFile) and os.path.isfile(DistFile)):
+ return None
+ else:
+ return DistFile
+ else:
+ AbsPath = os.path.normpath(os.path.join(os.getcwd(), DistFile))
+ if not (os.path.exists(AbsPath) and os.path.isfile(AbsPath)):
+ AbsPath = os.path.normpath(os.path.join(WorkspaceDir, DistFile))
+ if not (os.path.exists(AbsPath) and os.path.isfile(AbsPath)):
+ return None
+
+ return AbsPath
+
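A hypothetical usage sketch for GetFullPathDist (the path values are illustrative only):

# Resolution order: an absolute path is used as given; otherwise the current
# working directory is tried first, then the workspace root; None means not found.
AbsPath = GetFullPathDist('MyPkg.dist', 'C:\\edk2')
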
+if __name__ == '__main__':
+ RETVAL = Main()
+ #
+ # 0-127 is a safe return range, and 1 is a standard default error
+ #
+ if RETVAL < 0 or RETVAL > 127:
+ RETVAL = 1
+ sys.exit(RETVAL)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py
new file mode 100755
index 00000000..7c8a829b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py
@@ -0,0 +1,1414 @@
+## @file
+# This file contains unit tests for comment generating
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+import os
+import unittest
+
+import Logger.Log as Logger
+from GenMetaFile.GenInfFile import GenGuidSections
+from GenMetaFile.GenInfFile import GenProtocolPPiSections
+from GenMetaFile.GenInfFile import GenPcdSections
+from GenMetaFile.GenInfFile import GenSpecialSections
+from Library.CommentGenerating import GenGenericCommentF
+from Library.CommentGenerating import _GetHelpStr
+from Object.POM.CommonObject import TextObject
+from Object.POM.CommonObject import GuidObject
+from Object.POM.CommonObject import ProtocolObject
+from Object.POM.CommonObject import PpiObject
+from Object.POM.CommonObject import PcdObject
+from Object.POM.ModuleObject import HobObject
+
+from Library.StringUtils import GetSplitValueList
+from Library.DataType import TAB_SPACE_SPLIT
+from Library.DataType import TAB_LANGUAGE_EN_US
+from Library.DataType import TAB_LANGUAGE_ENG
+from Library.DataType import ITEM_UNDEFINED
+from Library.DataType import TAB_INF_FEATURE_PCD
+from Library import GlobalData
+from Library.Misc import CreateDirectory
+
+#
+# Test _GetHelpStr
+#
+class _GetHelpStrTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+ # Normal case1: have one help text object with Lang = 'en-US'
+ #
+ def testNormalCase1(self):
+ HelpStr = 'Hello world'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
+ HelpTextObj.SetString(HelpStr)
+
+ HelpTextList = [HelpTextObj]
+ Result = _GetHelpStr(HelpTextList)
+ self.assertEqual(Result, HelpStr)
+
+ #
+    # Normal case2: have two help text objects with Lang = 'eng' and 'en-US'
+ #
+ def testNormalCase2(self):
+ HelpStr = 'Hello world'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
+ HelpTextObj.SetString(HelpStr)
+
+ HelpTextList = [HelpTextObj]
+
+ ExpectedStr = 'Hello world1'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
+ HelpTextObj.SetString(ExpectedStr)
+
+ HelpTextList.append(HelpTextObj)
+
+ Result = _GetHelpStr(HelpTextList)
+ self.assertEqual(Result, ExpectedStr)
+
+ #
+    # Normal case3: have two help text objects with Lang = '' and 'eng'
+ #
+ def testNormalCase3(self):
+ HelpStr = 'Hello world'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+
+ HelpTextList = [HelpTextObj]
+
+ ExpectedStr = 'Hello world1'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
+ HelpTextObj.SetString(ExpectedStr)
+
+ HelpTextList.append(HelpTextObj)
+
+ Result = _GetHelpStr(HelpTextList)
+ self.assertEqual(Result, ExpectedStr)
+
+ #
+    # Normal case4: have two help text objects with Lang = 'eng' and ''
+ #
+ def testNormalCase4(self):
+
+ ExpectedStr = 'Hello world1'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
+ HelpTextObj.SetString(ExpectedStr)
+ HelpTextList = [HelpTextObj]
+
+ HelpStr = 'Hello world'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ HelpTextList.append(HelpTextObj)
+
+ Result = _GetHelpStr(HelpTextList)
+ self.assertEqual(Result, ExpectedStr)
+
+ #
+    # Normal case5: have three help text objects with Lang = 'en-US', '' and ''
+ #
+ def testNormalCase5(self):
+
+ ExpectedStr = 'Hello world1'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
+ HelpTextObj.SetString(ExpectedStr)
+ HelpTextList = [HelpTextObj]
+
+ HelpStr = 'Hello unknown world'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ HelpTextList.append(HelpTextObj)
+
+ HelpStr = 'Hello mysterious world'
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ HelpTextList.append(HelpTextObj)
+
+ Result = _GetHelpStr(HelpTextList)
+ self.assertEqual(Result, ExpectedStr)
+
+ HelpTextList.sort()
+ self.assertEqual(Result, ExpectedStr)
+
+ HelpTextList.sort(reverse=True)
+ self.assertEqual(Result, ExpectedStr)
+
+
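
The expected results in the tests above imply a language-preference order when several help text objects are attached. A hedged standalone sketch of that order, read off the assertions rather than taken from the real Library.CommentGenerating implementation:

def PickHelp(Entries):               # Entries: list of (Lang, Text) pairs
    for Lang, Text in Entries:
        if Lang == 'en-US':          # an exact 'en-US' entry wins first
            return Text
    for Lang, Text in Entries:
        if Lang.startswith('en'):    # then any 'en'-family tag such as 'eng'
            return Text
    return Entries[0][1] if Entries else ''

assert PickHelp([('eng', 'Hello world'), ('en-US', 'Hello world1')]) == 'Hello world1'
assert PickHelp([('', 'Hello world'), ('eng', 'Hello world1')]) == 'Hello world1'
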
+#
+# Test GenGuidSections
+#
+class GenGuidSectionsTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+    # Helper API to generate a Guid object for the unit tests
+ #
+ def GuidFactory(self, CName, FFE, Usage, GuidType, VariableName, HelpStr):
+ Guid = GuidObject()
+ Guid.SetCName(CName)
+ Guid.SetFeatureFlag(FFE)
+ Guid.SetGuidTypeList([GuidType])
+ Guid.SetUsage(Usage)
+ Guid.SetVariableName(VariableName)
+
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ Guid.SetHelpTextList([HelpTextObj])
+
+ return Guid
+
+ #
+    # Normal case: have two GuidObject instances
+ #
+ def testNormalCase1(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 1'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'CONSUMES'
+ GuidType = 'Variable'
+ VariableName = ''
+ HelpStr = 'Usage comment line 2'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+## PRODUCES ## Event # Usage comment line 1
+## CONSUMES ## Variable: # Usage comment line 2
+Guid1|FFE1'''
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+    # Normal case: have two GuidObject instances
+ #
+ def testNormalCase2(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 1'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = 'Generic comment line 1\n Generic comment line 2'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+## PRODUCES ## Event # Usage comment line 1
+# Generic comment line 1
+# Generic comment line 2
+Guid1|FFE1'''
+
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+    # Normal case: have two GuidObject instances; one help string goes to the generic help,
+    # the other goes into the usage comment
+ #
+ def testNormalCase3(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = 'Generic comment'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 1'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+# Generic comment
+## PRODUCES ## Event # Usage comment line 1
+Guid1|FFE1'''
+
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # Normal case: have one GuidObject, generic comment multiple lines
+ #
+ def testNormalCase5(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = 'Generic comment line1 \n generic comment line 2'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+# Generic comment line1
+# generic comment line 2
+Guid1|FFE1'''
+
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # Normal case: have one GuidObject, usage comment multiple lines
+ #
+ def testNormalCase6(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 1\n Usage comment line 2'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+Guid1|FFE1 ## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
+'''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+ #
+ # Normal case: have one GuidObject, usage comment one line
+ #
+ def testNormalCase7(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = 'Usage comment line 1'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+Guid1|FFE1 # Usage comment line 1
+'''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+ #
+    # Normal case: have two GuidObject instances
+ #
+ def testNormalCase8(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 1\n Usage comment line 2'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 3'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
+## PRODUCES ## Event # Usage comment line 3
+Guid1|FFE1
+'''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+ #
+ # Normal case: have no GuidObject
+ #
+ def testNormalCase9(self):
+ GuidList = []
+
+ Result = GenGuidSections(GuidList)
+ Expected = ''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+ #
+ # Normal case: have one GuidObject with no comment generated
+ #
+ def testNormalCase10(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = ''
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+Guid1|FFE1
+'''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+ #
+    # Normal case: have three GuidObject instances
+ #
+ def testNormalCase11(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = 'general comment line 1'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = 'Usage comment line 3'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'UNDEFINED'
+ GuidType = 'UNDEFINED'
+ VariableName = ''
+ HelpStr = 'general comment line 2'
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+# general comment line 1
+## PRODUCES ## Event # Usage comment line 3
+# general comment line 2
+Guid1|FFE1
+'''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+ #
+    # Normal case: have three GuidObject instances, with Usage/Type and no help
+ #
+ def testNormalCase12(self):
+ GuidList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'GUID'
+ VariableName = ''
+ HelpStr = ''
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'PRODUCES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = ''
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+ Usage = 'CONSUMES'
+ GuidType = 'Event'
+ VariableName = ''
+ HelpStr = ''
+ Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
+ VariableName, HelpStr)
+ GuidList.append(Guid1)
+
+ Result = GenGuidSections(GuidList)
+ Expected = '''[Guids]
+## PRODUCES ## GUID
+## PRODUCES ## Event
+## CONSUMES ## Event
+Guid1|FFE1
+'''
+ self.assertEqual(Result.strip(), Expected.strip())
+
+#
+# Test GenProtocolPPiSections
+#
+class GenProtocolPPiSectionsTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+    # Helper API to generate a Protocol/Ppi object for the unit tests
+ #
+ def ObjectFactory(self, CName, FFE, Usage, Notify, HelpStr, IsProtocol):
+ if IsProtocol:
+ Object = ProtocolObject()
+ else:
+ Object = PpiObject()
+
+ Object.SetCName(CName)
+ Object.SetFeatureFlag(FFE)
+ Object.SetUsage(Usage)
+ Object.SetNotify(Notify)
+
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ Object.SetHelpTextList([HelpTextObj])
+
+ return Object
+
+ # Usage Notify Help INF Comment
+ #1 UNDEFINED true Present ## UNDEFINED ## NOTIFY # Help
+ #2 UNDEFINED true Not Present ## UNDEFINED ## NOTIFY
+ #3 UNDEFINED false Present ## UNDEFINED # Help
+ #4 UNDEFINED false Not Present ## UNDEFINED
+ #5 UNDEFINED Not Present Present # Help
+ #6 UNDEFINED Not Present Not Present <empty>
+ #7 Other true Present ## Other ## NOTIFY # Help
+ #8 Other true Not Present ## Other ## NOTIFY
+ #9 Other false Present ## Other # Help
+ #A Other false Not Present ## Other
+ #B Other Not Present Present ## Other # Help
+ #C Other Not Present Not Present ## Other
+
+ def testNormalCase1(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'UNDEFINED'
+ Notify = True
+ HelpStr = 'Help'
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ IsProtocol = False
+ ObjectList = []
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Ppis]
+Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase2(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'UNDEFINED'
+ Notify = True
+ HelpStr = ''
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## UNDEFINED ## NOTIFY'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase3(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'UNDEFINED'
+ Notify = False
+ HelpStr = 'Help'
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## UNDEFINED # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase4(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'UNDEFINED'
+ Notify = False
+ HelpStr = ''
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## UNDEFINED'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase5(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'UNDEFINED'
+ Notify = ''
+ HelpStr = 'Help'
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase6(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'UNDEFINED'
+ Notify = ''
+ HelpStr = ''
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase7(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'PRODUCES'
+ Notify = True
+ HelpStr = 'Help'
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## PRODUCES ## NOTIFY # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase8(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'PRODUCES'
+ Notify = True
+ HelpStr = ''
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## PRODUCES ## NOTIFY'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase9(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'PRODUCES'
+ Notify = False
+ HelpStr = 'Help'
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## PRODUCES # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCaseA(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'PRODUCES'
+ Notify = False
+ HelpStr = ''
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## PRODUCES'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCaseB(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'PRODUCES'
+ Notify = ''
+ HelpStr = 'Help'
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## PRODUCES # Help'''
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCaseC(self):
+ ObjectList = []
+
+ CName = 'Guid1'
+ FFE = 'FFE1'
+
+ Usage = 'PRODUCES'
+ Notify = ''
+ HelpStr = ''
+ IsProtocol = True
+ Object = self.ObjectFactory(CName, FFE, Usage, Notify,
+ HelpStr, IsProtocol)
+ ObjectList.append(Object)
+
+
+ Result = GenProtocolPPiSections(ObjectList, IsProtocol)
+ Expected = '''[Protocols]
+Guid1|FFE1 ## PRODUCES'''
+ self.assertEqual(Result.strip(), Expected)
+
+#
+# Test GenPcdSections
+#
+class GenPcdSectionsTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+    #
+    # Helper API to generate a Pcd Object for the unit tests
+    #
+ def ObjectFactory(self, ItemType, TSCName, CName, DValue, FFE, Usage, Str):
+ Object = PcdObject()
+ HelpStr = Str
+
+ Object.SetItemType(ItemType)
+ Object.SetTokenSpaceGuidCName(TSCName)
+ Object.SetCName(CName)
+ Object.SetDefaultValue(DValue)
+ Object.SetFeatureFlag(FFE)
+ Object.SetValidUsage(Usage)
+
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ Object.SetHelpTextList([HelpTextObj])
+
+ return Object
+
+
+ # Usage Help INF Comment
+ #1 UNDEFINED Present # Help
+ #2 UNDEFINED Not Present <empty>
+ #3 Other Present ## Other # Help
+ #4 Other Not Present ## Other
+
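+    # Illustrative sketch (an assumption inferred from the expected strings
+    # below, not the production GenPcdSections logic): the table above
+    # reduces to roughly this rule for a Pcd entry's tail comment:
+    #
+    #   Parts = []
+    #   if Usage != 'UNDEFINED':
+    #       Parts.append('## ' + Usage)
+    #   if HelpStr:
+    #       Parts.append('# ' + HelpStr.replace('\n', ' '))
+    #   TailComment = ' '.join(Parts)
+    #
+    # Multi-line UNDEFINED help is the exception: it is emitted as
+    # standalone '#' lines instead (see testNormalCase6/7).
+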
+ def testNormalCase1(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'UNDEFINED'
+ Str = 'Help'
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = \
+ '[Pcd]\n' + \
+ 'TSCName.CName|DValue|FFE # Help'
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase2(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'UNDEFINED'
+ Str = ''
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '[Pcd]\nTSCName.CName|DValue|FFE'
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase3(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'CONSUMES'
+ Str = 'Help'
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '[Pcd]\nTSCName.CName|DValue|FFE ## CONSUMES # Help'
+ self.assertEqual(Result.strip(), Expected)
+
+ def testNormalCase4(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'CONSUMES'
+ Str = ''
+
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '[Pcd]\nTSCName.CName|DValue|FFE ## CONSUMES'
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # multiple lines for normal usage
+ #
+ def testNormalCase5(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'CONSUMES'
+        Str = 'comment line 1\ncomment line 2'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '''[Pcd]
+TSCName.CName|DValue|FFE ## CONSUMES # comment line 1 comment line 2'''
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # multiple lines for UNDEFINED usage
+ #
+ def testNormalCase6(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'UNDEFINED'
+        Str = 'comment line 1\ncomment line 2'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Usage = 'UNDEFINED'
+        Str = 'comment line 3'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '''[Pcd]
+# comment line 1
+# comment line 2
+# comment line 3
+TSCName.CName|DValue|FFE'''
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # multiple lines for UNDEFINED and normal usage
+ #
+ def testNormalCase7(self):
+ ObjectList = []
+ ItemType = 'Pcd'
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'UNDEFINED'
+        Str = 'comment line 1\ncomment line 2'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Usage = 'CONSUMES'
+ Str = 'Foo'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Usage = 'UNDEFINED'
+        Str = 'comment line 3'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '''[Pcd]
+# comment line 1
+# comment line 2
+## CONSUMES # Foo
+# comment line 3
+TSCName.CName|DValue|FFE'''
+ self.assertEqual(Result.strip(), Expected)
+
+ # Usage Help INF Comment
+ # CONSUMES Present # Help (keep <EOL> and insert '#' at beginning of each new line)
+ # CONSUMES Not Present <empty>
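+    # (For FeaturePcd entries the usage string itself is never emitted; only
+    # the help text survives, one '#' line per input line, as the three cases
+    # below show.)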
+
+ #
+ # TAB_INF_FEATURE_PCD
+ #
+ def testNormalCase8(self):
+ ObjectList = []
+ ItemType = TAB_INF_FEATURE_PCD
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'CONSUMES'
+        Str = 'comment line 1\ncomment line 2'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '''[FeaturePcd]
+# comment line 1
+# comment line 2
+TSCName.CName|DValue|FFE'''
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # TAB_INF_FEATURE_PCD
+ #
+ def testNormalCase9(self):
+ ObjectList = []
+ ItemType = TAB_INF_FEATURE_PCD
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'CONSUMES'
+ Str = ''
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '''[FeaturePcd]
+TSCName.CName|DValue|FFE'''
+ self.assertEqual(Result.strip(), Expected)
+
+ #
+ # TAB_INF_FEATURE_PCD
+ #
+ def testNormalCase10(self):
+ ObjectList = []
+ ItemType = TAB_INF_FEATURE_PCD
+ TSCName = 'TSCName'
+ CName = 'CName'
+ DValue = 'DValue'
+ FFE = 'FFE'
+
+ Usage = 'PRODUCES'
+        Str = 'comment line 1\ncomment line 2'
+ Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
+ Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenPcdSections(ObjectList)
+ Expected = '''
+
+[FeaturePcd]
+# comment line 1
+# comment line 2
+TSCName.CName|DValue|FFE
+'''
+ self.assertEqual(Result, Expected)
+
+
+#
+# Test GenSpecialSections of Hob
+#
+class GenHobSectionsTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+    #
+    # Helper API to generate a Hob Object for the unit tests
+    #
+ def ObjectFactory(self, SupArchList, Type, Usage, Str):
+ Object = HobObject()
+ HelpStr = Str
+
+ Object.SetHobType(Type)
+ Object.SetUsage(Usage)
+ Object.SetSupArchList(SupArchList)
+
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang('')
+ HelpTextObj.SetString(HelpStr)
+ Object.SetHelpTextList([HelpTextObj])
+
+ return Object
+
+ def testNormalCase1(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = 'Help'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# # Help
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase2(self):
+ ObjectList = []
+ SupArchList = []
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = 'Help'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob]
+# ##
+# # Help
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase3(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\nComment Line 1\n\n'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# # Comment Line 1
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase4(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\nComment Line 1\n'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# # Comment Line 1
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase5(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = 'Comment Line 1\n\n'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# # Comment Line 1
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase6(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = ''
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase7(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\nNew Stack HoB'
+
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# # New Stack HoB
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase8(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\nNew Stack HoB\n\nTail Comment'
+
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# # New Stack HoB
+# #
+# # Tail Comment
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase9(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\n\n'
+
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# #
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase10(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\n'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# #
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase11(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\n\n\n'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# #
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase12(self):
+ ObjectList = []
+ SupArchList = ['X64']
+ Type = 'Foo'
+ Usage = 'UNDEFINED'
+ Str = '\n\n\n\n'
+
+ Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
+ ObjectList.append(Object)
+
+ Result = GenSpecialSections(ObjectList, 'Hob')
+ Expected = '''# [Hob.X64]
+# ##
+# #
+# #
+# #
+# Foo ## UNDEFINED
+#
+#
+'''
+ self.assertEqual(Result, Expected)
+
+#
+# Test GenGenericCommentF
+#
+class GenGenericCommentFTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def testNormalCase1(self):
+ CommentLines = 'Comment Line 1'
+ Result = GenGenericCommentF(CommentLines)
+ Expected = '# Comment Line 1\n'
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase2(self):
+ CommentLines = '\n'
+ Result = GenGenericCommentF(CommentLines)
+ Expected = '#\n'
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase3(self):
+ CommentLines = '\n\n\n'
+ Result = GenGenericCommentF(CommentLines)
+ Expected = '#\n#\n#\n'
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase4(self):
+        CommentLines = 'comment line 1\n'
+ Result = GenGenericCommentF(CommentLines)
+        Expected = '# comment line 1\n'
+ self.assertEqual(Result, Expected)
+
+ def testNormalCase5(self):
+        CommentLines = 'comment line 1\n comment line 2\n'
+ Result = GenGenericCommentF(CommentLines)
+        Expected = '# comment line 1\n# comment line 2\n'
+ self.assertEqual(Result, Expected)
+
+if __name__ == '__main__':
+ Logger.Initialize()
+ unittest.main()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py
new file mode 100755
index 00000000..c033322c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py
@@ -0,0 +1,917 @@
+## @file
+# This file contains unit tests for CommentParsing
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+import unittest
+
+import Logger.Log as Logger
+from Library.CommentParsing import ParseHeaderCommentSection, \
+ ParseGenericComment, \
+ ParseDecPcdGenericComment, \
+ ParseDecPcdTailComment
+from Library.CommentParsing import _IsCopyrightLine
+from Library.StringUtils import GetSplitValueList
+from Library.DataType import TAB_SPACE_SPLIT
+from Library.DataType import TAB_LANGUAGE_EN_US
+
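+# Note: every test below pairs each comment line with its 1-based line
+# number before handing it to the parser. A shared helper (hypothetical,
+# not part of the original file) would collapse the repeated loop to:
+#
+#   def ToCommentLinesList(Text):
+#       return [(Line, Num) for Num, Line in
+#               enumerate(GetSplitValueList(Text, "\n"), start=1)]
+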
+#
+# Test ParseHeaderCommentSection
+#
+class ParseHeaderCommentSectionTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+ # Normal case1: have license/copyright/license above @file
+ #
+ def testNormalCase1(self):
+ TestCommentLines1 = \
+ '''# License1
+ # License2
+ #
+ ## @file
+ # example abstract
+ #
+ # example description
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ # License3
+ #'''
+
+ CommentList = GetSplitValueList(TestCommentLines1, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+ ExpectedAbstract = 'example abstract'
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = 'example description'
+ self.assertEqual(Description, ExpectedDescription)
+
+        ExpectedCopyright = \
+            'Copyright (c) 2007 - 2018,'\
+            ' Intel Corporation. All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = 'License1\nLicense2\n\nLicense3'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Normal case2: have license/copyright above @file, but no copyright after
+    # Normal case2: license/copyright above @file, but no license after the copyright
+ def testNormalCase2(self):
+ TestCommentLines2 = \
+ ''' # License1
+ # License2
+ #
+ ## @file
+ # example abstract
+ #
+ # example description
+ #
+ #Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines2, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+    #
+    # Helper API to generate a Protocol/Ppi Object for the unit tests
+    #
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = 'example description'
+ self.assertEqual(Description, ExpectedDescription)
+
+ ExpectedCopyright = \
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = 'License1\nLicense2'
+ self.assertEqual(License, ExpectedLicense)
+
+
+ #
+    # Normal case3: have license/copyright/license above @file,
+ # but no abstract/description
+ #
+ def testNormalCase3(self):
+ TestCommentLines3 = \
+ ''' # License1
+ # License2
+ #
+ ## @file
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ # License3 Line1
+ # License3 Line2
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines3, "\n")
+ LineNum = 0
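+    # A minimal sketch of the rule the table above encodes. This helper is
+    # illustrative only (an assumption drawn from the expected strings in the
+    # cases below, not the production GenProtocolPPiSections logic) and is
+    # not called by the tests.
+    def _ExpectedTailComment(self, Usage, Notify, HelpStr):
+        Parts = []
+        if Usage != 'UNDEFINED':
+            # Any usage other than UNDEFINED is always emitted
+            Parts.append('## ' + Usage)
+        elif Notify != '':
+            # An explicit True/False Notify forces '## UNDEFINED' out as well
+            Parts.append('## UNDEFINED')
+        if Notify is True:
+            Parts.append('## NOTIFY')
+        if HelpStr:
+            Parts.append('# ' + HelpStr)
+        return ' '.join(Parts)
+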
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+ ExpectedAbstract = ''
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = ''
+ self.assertEqual(Description, ExpectedDescription)
+
+        ExpectedCopyright = \
+            'Copyright (c) 2007 - 2018,'\
+            ' Intel Corporation. All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = \
+ 'License1\n' \
+ 'License2\n\n' \
+ 'License3 Line1\n' \
+ 'License3 Line2'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Normal case4: format example in spec
+ #
+ def testNormalCase4(self):
+ TestCommentLines = \
+ '''
+ ## @file
+ # Abstract
+ #
+ # Description
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+ ExpectedAbstract = 'Abstract'
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = 'Description'
+ self.assertEqual(Description, ExpectedDescription)
+
+ ExpectedCopyright = \
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = \
+ 'License'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Normal case5: other line between copyright
+ #
+ def testNormalCase5(self):
+ TestCommentLines = \
+ '''
+ ## @file
+ # Abstract
+ #
+ # Description
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # other line
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+ ExpectedAbstract = 'Abstract'
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = 'Description'
+ self.assertEqual(Description, ExpectedDescription)
+
+ ExpectedCopyright = \
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>\n'\
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = \
+ 'License'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Normal case6: multiple lines of copyright
+ #
+ def testNormalCase6(self):
+ TestCommentLines = \
+ '''
+ ## @file
+ # Abstract
+ #
+ # Description
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
+ #
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+ ExpectedAbstract = 'Abstract'
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = 'Description'
+ self.assertEqual(Description, ExpectedDescription)
+
+ ExpectedCopyright = \
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>\n'\
+ 'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
+ ' All rights reserved.<BR>\n'\
+ 'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
+ ' All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = \
+ 'License'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Normal case7: Abstract not present
+ #
+ def testNormalCase7(self):
+ TestCommentLines = \
+ '''
+ ## @file
+ #
+ # Description
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
+ # Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
+ #
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+ ExpectedAbstract = ''
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = 'Description'
+ self.assertEqual(Description, ExpectedDescription)
+
+ ExpectedCopyright = \
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>\n'\
+ 'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
+ ' All rights reserved.<BR>\n'\
+ 'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
+ ' All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = \
+ 'License'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Normal case8: Description not present
+ #
+ def testNormalCase8(self):
+ TestCommentLines = \
+ '''
+ ## @file
+        # Abstract
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ #
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ Abstract, Description, Copyright, License = \
+ ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
+
+        ExpectedAbstract = 'Abstract'
+ self.assertEqual(Abstract, ExpectedAbstract)
+
+ ExpectedDescription = ''
+ self.assertEqual(Description, ExpectedDescription)
+
+ ExpectedCopyright = \
+ 'Copyright (c) 2007 - 2018, Intel Corporation.'\
+ ' All rights reserved.<BR>'
+ self.assertEqual(Copyright, ExpectedCopyright)
+
+ ExpectedLicense = \
+ 'License'
+ self.assertEqual(License, ExpectedLicense)
+
+ #
+ # Error case1: No copyright found
+ #
+ def testErrorCase1(self):
+ TestCommentLines = \
+ '''
+ ## @file
+ # Abstract
+ #
+ # Description
+ #
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ self.assertRaises(Logger.FatalError,
+ ParseHeaderCommentSection,
+ TestCommentLinesList,
+ "PhonyFile")
+
+ #
+ # Error case2: non-empty non-comment lines passed in
+ #
+ def testErrorCase2(self):
+ TestCommentLines = \
+ '''
+ ## @file
+ # Abstract
+ #
+ this is invalid line
+ # Description
+ #
+ # Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+ # License
+ #
+ ##'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ self.assertRaises(Logger.FatalError,
+ ParseHeaderCommentSection,
+ TestCommentLinesList,
+ "PhonyFile")
+
+#
+# Test ParseGenericComment
+#
+class ParseGenericCommentTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+ # Normal case1: one line of comment
+ #
+ def testNormalCase1(self):
+ TestCommentLines = \
+ '''# hello world'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase1')
+ self.failIf(not HelptxtObj)
+ self.assertEqual(HelptxtObj.GetString(), 'hello world')
+ self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
+
+ #
+ # Normal case2: multiple lines of comment
+ #
+ def testNormalCase2(self):
+ TestCommentLines = \
+ '''## hello world
+ # second line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase2')
+ self.failIf(not HelptxtObj)
+ self.assertEqual(HelptxtObj.GetString(),
+ 'hello world\n' + 'second line')
+ self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
+
+ #
+    # Normal case3: multiple lines of comment; non-comment lines are skipped
+ #
+ def testNormalCase3(self):
+ TestCommentLines = \
+ '''## hello world
+ This is not comment line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase3')
+ self.failIf(not HelptxtObj)
+ self.assertEqual(HelptxtObj.GetString(),
+ 'hello world\n\n')
+ self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
+
+#
+# Test ParseDecPcdGenericComment
+#
+class ParseDecPcdGenericCommentTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+ # Normal case1: comments with no special comment
+ #
+ def testNormalCase1(self):
+ TestCommentLines = \
+ '''## hello world
+ # second line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (HelpTxt, PcdErr) = \
+ ParseDecPcdGenericComment(TestCommentLinesList, 'testNormalCase1')
+ self.failIf(not HelpTxt)
+ self.failIf(PcdErr)
+ self.assertEqual(HelpTxt,
+ 'hello world\n' + 'second line')
+
+
+ #
+ # Normal case2: comments with valid list
+ #
+ def testNormalCase2(self):
+ TestCommentLines = \
+ '''## hello world
+ # second line
+ # @ValidList 1, 2, 3
+ # other line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (HelpTxt, PcdErr) = \
+ ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpTxt)
+ self.failIf(not PcdErr)
+ self.assertEqual(HelpTxt,
+ 'hello world\n' + 'second line\n' + 'other line')
+ ExpectedList = GetSplitValueList('1 2 3', TAB_SPACE_SPLIT)
+ ActualList = [item for item in \
+ GetSplitValueList(PcdErr.GetValidValue(), TAB_SPACE_SPLIT) if item]
+ self.assertEqual(ExpectedList, ActualList)
+ self.failIf(PcdErr.GetExpression())
+ self.failIf(PcdErr.GetValidValueRange())
+
+ #
+ # Normal case3: comments with valid range
+ #
+ def testNormalCase3(self):
+ TestCommentLines = \
+ '''## hello world
+ # second line
+ # @ValidRange LT 1 AND GT 2
+ # other line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (HelpTxt, PcdErr) = \
+ ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpTxt)
+ self.failIf(not PcdErr)
+ self.assertEqual(HelpTxt,
+ 'hello world\n' + 'second line\n' + 'other line')
+ self.assertEqual(PcdErr.GetValidValueRange().strip(), 'LT 1 AND GT 2')
+ self.failIf(PcdErr.GetExpression())
+ self.failIf(PcdErr.GetValidValue())
+
+ #
+ # Normal case4: comments with valid expression
+ #
+ def testNormalCase4(self):
+ TestCommentLines = \
+ '''## hello world
+ # second line
+ # @Expression LT 1 AND GT 2
+ # other line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (HelpTxt, PcdErr) = \
+ ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpTxt)
+ self.failIf(not PcdErr)
+ self.assertEqual(HelpTxt,
+ 'hello world\n' + 'second line\n' + 'other line')
+ self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
+ self.failIf(PcdErr.GetValidValueRange())
+ self.failIf(PcdErr.GetValidValue())
+
+ #
+ # Normal case5: comments with valid expression and no generic comment
+ #
+ def testNormalCase5(self):
+ TestCommentLines = \
+ '''# @Expression LT 1 AND GT 2'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (HelpTxt, PcdErr) = \
+ ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(HelpTxt)
+ self.failIf(not PcdErr)
+ self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
+ self.failIf(PcdErr.GetValidValueRange())
+ self.failIf(PcdErr.GetValidValue())
+
+ #
+ # Normal case6: comments with only generic help text
+ #
+ def testNormalCase6(self):
+ TestCommentLines = \
+ '''#'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (HelpTxt, PcdErr) = \
+ ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
+ self.assertEqual(HelpTxt, '\n')
+ self.failIf(PcdErr)
+
+
+
+ #
+    # Error case1: comments with both expression and valid list; the latter
+    # is used and the former is ignored, with a warning message
+ #
+ def testErrorCase1(self):
+ TestCommentLines = \
+ '''## hello world
+ # second line
+ # @ValidList 1, 2, 3
+ # @Expression LT 1 AND GT 2
+ # other line'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ try:
+ ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
+ except Logger.FatalError:
+ pass
+
+#
+# Test ParseDecPcdTailComment
+#
+class ParseDecPcdTailCommentTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+ # Normal case1: comments with no SupModeList
+ #
+ def testNormalCase1(self):
+ TestCommentLines = \
+ '''## #hello world'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (SupModeList, HelpStr) = \
+ ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpStr)
+ self.failIf(SupModeList)
+ self.assertEqual(HelpStr,
+ 'hello world')
+
+ #
+ # Normal case2: comments with one SupMode
+ #
+ def testNormalCase2(self):
+ TestCommentLines = \
+ '''## BASE #hello world'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (SupModeList, HelpStr) = \
+ ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpStr)
+ self.failIf(not SupModeList)
+ self.assertEqual(HelpStr,
+ 'hello world')
+ self.assertEqual(SupModeList,
+ ['BASE'])
+
+ #
+ # Normal case3: comments with more than one SupMode
+ #
+ def testNormalCase3(self):
+ TestCommentLines = \
+ '''## BASE UEFI_APPLICATION #hello world'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (SupModeList, HelpStr) = \
+ ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpStr)
+ self.failIf(not SupModeList)
+ self.assertEqual(HelpStr,
+ 'hello world')
+ self.assertEqual(SupModeList,
+ ['BASE', 'UEFI_APPLICATION'])
+
+ #
+ # Normal case4: comments with more than one SupMode, no help text
+ #
+ def testNormalCase4(self):
+ TestCommentLines = \
+ '''## BASE UEFI_APPLICATION'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (SupModeList, HelpStr) = \
+ ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(HelpStr)
+ self.failIf(not SupModeList)
+ self.assertEqual(SupModeList,
+ ['BASE', 'UEFI_APPLICATION'])
+
+ #
+    # Normal case5: general comments with no SupModeList, extracted from a real case
+ #
+ def testNormalCase5(self):
+ TestCommentLines = \
+ ''' # 1 = 128MB, 2 = 256MB, 3 = MAX'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ (SupModeList, HelpStr) = \
+ ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
+ self.failIf(not HelpStr)
+ self.assertEqual(HelpStr,
+ '1 = 128MB, 2 = 256MB, 3 = MAX')
+ self.failIf(SupModeList)
+
+
+ #
+    # Error case2: comments whose SupModeList contains both valid and
+    # invalid module types
+ #
+ def testErrorCase2(self):
+ TestCommentLines = \
+ '''## BASE INVALID_MODULE_TYPE #hello world'''
+
+ CommentList = GetSplitValueList(TestCommentLines, "\n")
+ LineNum = 0
+ TestCommentLinesList = []
+ for Comment in CommentList:
+ LineNum += 1
+ TestCommentLinesList.append((Comment, LineNum))
+
+ try:
+ ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
+ except Logger.FatalError:
+ pass
+
+
+#
+# Test _IsCopyrightLine
+#
+class _IsCopyrightLineTest(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ #
+ # Normal case
+ #
+ def testCase1(self):
+ Line = 'this is a copyright ( line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(not Result)
+
+ #
+ # Normal case
+ #
+ def testCase2(self):
+ Line = 'this is a Copyright ( line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(not Result)
+
+ #
+ # Normal case
+ #
+ def testCase3(self):
+ Line = 'this is not aCopyright ( line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(Result)
+
+ #
+ # Normal case
+ #
+ def testCase4(self):
+ Line = 'this is Copyright( line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(not Result)
+
+ #
+ # Normal case
+ #
+ def testCase5(self):
+ Line = 'this is Copyright (line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(not Result)
+
+ #
+ # Normal case
+ #
+ def testCase6(self):
+ Line = 'this is not Copyright line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(Result)
+
+ #
+ # Normal case
+ #
+ def testCase7(self):
+ Line = 'Copyright (c) line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(not Result)
+
+ #
+ # Normal case
+ #
+ def testCase8(self):
+ Line = ' Copyright (c) line'
+ Result = _IsCopyrightLine(Line)
+ self.failIf(not Result)
+
+ #
+ # Normal case
+ #
+ def testCase9(self):
+ Line = 'not a Copyright '
+ Result = _IsCopyrightLine(Line)
+ self.failIf(Result)
+
+if __name__ == '__main__':
+ Logger.Initialize()
+ unittest.main()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py
new file mode 100755
index 00000000..3c64c8ef
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py
@@ -0,0 +1,279 @@
+## @file
+# This file contains unit tests for DecParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+from __future__ import print_function
+import os
+import unittest
+
+from Parser.DecParserMisc import \
+ IsValidCArray, \
+ IsValidPcdDatum
+
+from Parser.DecParser import Dec
+
+from Library.ParserValidate import IsValidCFormatGuid
+
+#
+# Test tool functions
+#
+def TestToolFuncs():
+ assert IsValidCArray('{0x1, 0x23}')
+
+ # Empty after comma
+ assert not IsValidCArray('{0x1, 0x23, }')
+
+ # 0x2345 too long
+ assert not IsValidCArray('{0x1, 0x2345}')
+
+ # Must end with '}'
+ assert not IsValidCArray('{0x1, 0x23, ')
+
+ # Whitespace between numbers
+ assert not IsValidCArray('{0x1, 0x2 3, }')
+
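+    # IsValidPcdDatum appears to return a (Valid, ErrorCause) tuple, hence
+    # the [0] indexing below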
+ assert IsValidPcdDatum('VOID*', '"test"')[0]
+ assert IsValidPcdDatum('VOID*', 'L"test"')[0]
+ assert IsValidPcdDatum('BOOLEAN', 'TRUE')[0]
+ assert IsValidPcdDatum('BOOLEAN', 'FALSE')[0]
+ assert IsValidPcdDatum('BOOLEAN', '0')[0]
+ assert IsValidPcdDatum('BOOLEAN', '1')[0]
+ assert IsValidPcdDatum('UINT8', '0xab')[0]
+
+ assert not IsValidPcdDatum('UNKNOWNTYPE', '0xabc')[0]
+ assert not IsValidPcdDatum('UINT8', 'not number')[0]
+
+    assert IsValidCFormatGuid('{ 0xfa0b1735 , 0x87a0, 0x4193, {0xb2, 0x66 , 0x53, 0x8c , 0x38, 0xaf, 0x48, 0xce }}')
+    assert not IsValidCFormatGuid('{ 0xfa0b1735 , 0x87a0, 0x4193, {0xb2, 0x66 , 0x53, 0x8c , 0x38, 0xaf, 0x48, 0xce }} 0xaa')
+
+def TestTemplate(TestString, TestFunc):
+ Path = os.path.join(os.getcwd(), 'test.dec')
+ Path = os.path.normpath(Path)
+    try:
+        # Write the test string to a temporary DEC file
+        with open(Path, 'w') as f:
+            f.write(TestString)
+    except IOError:
+        print('Cannot create temporary file [%s]!' % Path)
+        exit(-1)
+
+ # Call test function to test
+ Ret = TestFunc(Path, TestString)
+
+ # Test done, remove temporary file
+ os.remove(Path)
+ return Ret
+
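+# TestTemplate is invoked as TestTemplate(DecText, TestOK) for input that
+# must parse, or TestTemplate(DecText, TestError) for input the parser must
+# reject; it returns whatever the supplied test function returns.
+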
+# To make the unit tests work correctly, IsRaiseError must be set to True.
+# This function tests a DEC file with correct syntax.
+# @retval: parser object
+#
+def TestOK(Path, TestString):
+ try:
+ Parser = Dec(Path)
+ except:
+        raise AssertionError('Bug!!! Correct syntax in DEC file, but exception raised!\n' + TestString)
+ return Parser
+
+# This function tests a DEC file with wrong syntax;
+# if the parser catches the wrong syntax, an exception is raised, which is
+# the expected result
+def TestError(Path, TestString):
+    try:
+        Dec(Path)
+    except:
+        # Error raised, as expected
+        return True
+    raise AssertionError('Bug!!! Wrong syntax in DEC file, but passed by DEC parser!!\n' + TestString)
+
+def TestDecDefine():
+ TestString = '''
+ [Defines]
+ DEC_SPECIFICATION = 0x00010005
+ PACKAGE_NAME = MdePkg
+ PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
+ PACKAGE_VERSION = 1.02
+ '''
+ Parser = TestTemplate(TestString, TestOK)
+ DefObj = Parser.GetDefineSectionObject()
+ assert DefObj.GetPackageSpecification() == '0x00010005'
+ assert DefObj.GetPackageName() == 'MdePkg'
+ assert DefObj.GetPackageGuid() == '1E73767F-8F52-4603-AEB4-F29B510B6766'
+ assert DefObj.GetPackageVersion() == '1.02'
+
+ TestString = '''
+ [Defines]
+    UNKNOW_KEY = 0x00010005 # An unknown key
+ '''
+ assert TestTemplate(TestString, TestError)
+
+ TestString = '''
+ [Defines]
+ PACKAGE_GUID = F-8F52-4603-AEB4-F29B510B6766 # Error GUID
+ '''
+ assert TestTemplate(TestString, TestError)
+
+def TestDecInclude():
+ TestString = '''
+ [Defines]
+ DEC_SPECIFICATION = 0x00010005
+ PACKAGE_NAME = MdePkg
+ PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
+ PACKAGE_VERSION = 1.02
+ [ \\
+ Includes]
+ Include
+ [Includes.IA32]
+ Include/Ia32
+ '''
+
+ # Create directory in current directory
+ try:
+ os.makedirs('Include/Ia32')
+ except:
+ pass
+ Parser = TestTemplate(TestString, TestOK)
+
+ IncObj = Parser.GetIncludeSectionObject()
+ Items = IncObj.GetIncludes()
+ assert len(Items) == 1
+ assert Items[0].File == 'Include'
+
+ Items = IncObj.GetIncludes('IA32')
+ assert len(Items) == 1
+    # normpath is called in the DEC parser, so '/' is converted to '\' on Windows
+ assert Items[0].File == 'Include\\Ia32'
+
+ TestString = '''
+ [Defines]
+ DEC_SPECIFICATION = 0x00010005
+ PACKAGE_NAME = MdePkg
+ PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
+ PACKAGE_VERSION = 1.02
+ [Includes]
+ Include_not_exist # directory does not exist
+ '''
+ assert TestTemplate(TestString, TestError)
+
+ os.removedirs('Include/Ia32')
+
+def TestDecGuidPpiProtocol():
+ TestString = '''
+ [Defines]
+ DEC_SPECIFICATION = 0x00010005
+ PACKAGE_NAME = MdePkg
+ PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
+ PACKAGE_VERSION = 1.02
+ [Guids]
+ #
+ # GUID defined in UEFI2.1/UEFI2.0/EFI1.1
+ #
+ ## Include/Guid/GlobalVariable.h
+ gEfiGlobalVariableGuid = { 0x8BE4DF61, 0x93CA, 0x11D2, { 0xAA, 0x0D, 0x00, 0xE0, 0x98, 0x03, 0x2B, 0x8C }}
+ [Protocols]
+ ## Include/Protocol/Bds.h
+ gEfiBdsArchProtocolGuid = { 0x665E3FF6, 0x46CC, 0x11D4, { 0x9A, 0x38, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}
+ [Ppis]
+ ## Include/Ppi/MasterBootMode.h
+ gEfiPeiMasterBootModePpiGuid = { 0x7408d748, 0xfc8c, 0x4ee6, {0x92, 0x88, 0xc4, 0xbe, 0xc0, 0x92, 0xa4, 0x10 } }
+ '''
+ Parser = TestTemplate(TestString, TestOK)
+ Obj = Parser.GetGuidSectionObject()
+ Items = Obj.GetGuids()
+ assert Obj.GetSectionName() == 'Guids'.upper()
+ assert len(Items) == 1
+ assert Items[0].GuidCName == 'gEfiGlobalVariableGuid'
+ assert Items[0].GuidCValue == '{ 0x8BE4DF61, 0x93CA, 0x11D2, { 0xAA, 0x0D, 0x00, 0xE0, 0x98, 0x03, 0x2B, 0x8C }}'
+
+ Obj = Parser.GetProtocolSectionObject()
+ Items = Obj.GetProtocols()
+ assert Obj.GetSectionName() == 'Protocols'.upper()
+ assert len(Items) == 1
+ assert Items[0].GuidCName == 'gEfiBdsArchProtocolGuid'
+ assert Items[0].GuidCValue == '{ 0x665E3FF6, 0x46CC, 0x11D4, { 0x9A, 0x38, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}'
+
+ Obj = Parser.GetPpiSectionObject()
+ Items = Obj.GetPpis()
+ assert Obj.GetSectionName() == 'Ppis'.upper()
+ assert len(Items) == 1
+ assert Items[0].GuidCName == 'gEfiPeiMasterBootModePpiGuid'
+ assert Items[0].GuidCValue == '{ 0x7408d748, 0xfc8c, 0x4ee6, {0x92, 0x88, 0xc4, 0xbe, 0xc0, 0x92, 0xa4, 0x10 } }'
+
+def TestDecPcd():
+ TestString = '''
+ [Defines]
+ DEC_SPECIFICATION = 0x00010005
+ PACKAGE_NAME = MdePkg
+ PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
+ PACKAGE_VERSION = 1.02
+ [PcdsFeatureFlag]
+ ## If TRUE, the component name protocol will not be installed.
+ gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d
+
+ [PcdsFixedAtBuild]
+ ## Indicates the maximum length of unicode string
+ gEfiMdePkgTokenSpaceGuid.PcdMaximumUnicodeStringLength|1000000|UINT32|0x00000001
+
+ [PcdsFixedAtBuild.IPF]
+ ## The base address of IO port space for IA64 arch
+ gEfiMdePkgTokenSpaceGuid.PcdIoBlockBaseAddressForIpf|0x0ffffc000000|UINT64|0x0000000f
+
+ [PcdsFixedAtBuild,PcdsPatchableInModule]
+ ## This flag is used to control the printout of DebugLib
+ gEfiMdePkgTokenSpaceGuid.PcdDebugPrintErrorLevel|0x80000000|UINT32|0x00000006
+
+ [PcdsFixedAtBuild,PcdsPatchableInModule,PcdsDynamic]
+ ## This value is used to set the base address of pci express hierarchy
+ gEfiMdePkgTokenSpaceGuid.PcdPciExpressBaseAddress|0xE0000000|UINT64|0x0000000a
+ '''
+ Parser = TestTemplate(TestString, TestOK)
+ Obj = Parser.GetPcdSectionObject()
+ Items = Obj.GetPcds('PcdsFeatureFlag', 'COMMON')
+ assert len(Items) == 1
+ assert Items[0].TokenSpaceGuidCName == 'gEfiMdePkgTokenSpaceGuid'
+ assert Items[0].TokenCName == 'PcdComponentNameDisable'
+ assert Items[0].DefaultValue == 'FALSE'
+ assert Items[0].DatumType == 'BOOLEAN'
+ assert Items[0].TokenValue == '0x0000000d'
+
+ Items = Obj.GetPcdsByType('PcdsFixedAtBuild')
+ assert len(Items) == 4
+ assert len(Obj.GetPcdsByType('PcdsPatchableInModule')) == 2
+
+def TestDecUserExtension():
+ TestString = '''
+ [Defines]
+ DEC_SPECIFICATION = 0x00010005
+ PACKAGE_NAME = MdePkg
+ PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
+ PACKAGE_VERSION = 1.02
+ [UserExtensions.MyID."TestString".IA32]
+ Some Strings...
+ '''
+ Parser = TestTemplate(TestString, TestOK)
+ Obj = Parser.GetUserExtensionSectionObject()
+ Items = Obj.GetAllUserExtensions()
+ assert len(Items) == 1
+ assert Items[0].UserString == 'Some Strings...'
+ assert len(Items[0].ArchAndModuleType) == 1
+ assert ['MyID', '"TestString"', 'IA32'] in Items[0].ArchAndModuleType
+
+if __name__ == '__main__':
+ import Logger.Logger
+ Logger.Logger.Initialize()
+ unittest.FunctionTestCase(TestToolFuncs).runTest()
+ unittest.FunctionTestCase(TestDecDefine).runTest()
+ unittest.FunctionTestCase(TestDecInclude).runTest()
+ unittest.FunctionTestCase(TestDecGuidPpiProtocol).runTest()
+ unittest.FunctionTestCase(TestDecPcd).runTest()
+ unittest.FunctionTestCase(TestDecUserExtension).runTest()
+
+ print('All tests passed...')
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
new file mode 100755
index 00000000..4addd325
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py
@@ -0,0 +1,528 @@
+## @file
+# This file contains unit tests for DecParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+import os
+import unittest
+from Logger.Log import FatalError
+
+from Parser.DecParser import \
+ Dec, \
+ _DecDefine, \
+ _DecLibraryclass, \
+ _DecPcd, \
+ _DecGuid, \
+ FileContent, \
+ _DecBase, \
+ CleanString
+
+from Object.Parser.DecObject import _DecComments
+
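+# The cases below exercise private parser helpers (_DecDefine, _DecPcd,
+# _DecGuid, ...) directly: each GetObj helper wraps raw text in a
+# FileContent buffer and primes _RawData.CurrentLine so that _ParseItem can
+# be driven in isolation, without parsing a whole DEC file.
+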
+#
+# Test CleanString
+#
+class CleanStringTestCase(unittest.TestCase):
+ def testCleanString(self):
+ Line, Comment = CleanString('')
+ self.assertEqual(Line, '')
+ self.assertEqual(Comment, '')
+
+ Line, Comment = CleanString('line without comment')
+ self.assertEqual(Line, 'line without comment')
+ self.assertEqual(Comment, '')
+
+ Line, Comment = CleanString('# pure comment')
+ self.assertEqual(Line, '')
+ self.assertEqual(Comment, '# pure comment')
+
+ Line, Comment = CleanString('line # and comment')
+ self.assertEqual(Line, 'line')
+ self.assertEqual(Comment, '# and comment')
+
+ def testCleanStringCpp(self):
+ Line, Comment = CleanString('line // and comment', AllowCppStyleComment = True)
+ self.assertEqual(Line, 'line')
+ self.assertEqual(Comment, '# and comment')
+
+#
+# Test _DecBase._MacroParser function
+#
+class MacroParserTestCase(unittest.TestCase):
+ def setUp(self):
+ self.dec = _DecBase(FileContent('dummy', []))
+
+ def testCorrectMacro(self):
+ self.dec._MacroParser('DEFINE MACRO1 = test1')
+ self.failIf('MACRO1' not in self.dec._LocalMacro)
+ self.assertEqual(self.dec._LocalMacro['MACRO1'], 'test1')
+
+ def testErrorMacro1(self):
+        # Raises a fatal error: macro names must consist of upper case letters
+ self.assertRaises(FatalError, self.dec._MacroParser, 'DEFINE not_upper_case = test2')
+
+ def testErrorMacro2(self):
+ # No macro name given
+ self.assertRaises(FatalError, self.dec._MacroParser, 'DEFINE ')
+
+#
+# Test _DecBase._TryBackSlash function
+#
+class TryBackSlashTestCase(unittest.TestCase):
+ def setUp(self):
+ Content = [
+ # Right case
+ 'test no backslash',
+
+ 'test with backslash \\',
+ 'continue second line',
+
+            # A lone backslash with nothing before it is not allowed
+ '\\',
+
+            # An empty line after a backslash is not allowed
+ 'line with backslash \\',
+ ''
+ ]
+ self.dec = _DecBase(FileContent('dummy', Content))
+
+ def testBackSlash(self):
+ #
+ # Right case, assert return values
+ #
+ ConcatLine, CommentList = self.dec._TryBackSlash(self.dec._RawData.GetNextLine(), [])
+ self.assertEqual(ConcatLine, 'test no backslash')
+ self.assertEqual(CommentList, [])
+
+ ConcatLine, CommentList = self.dec._TryBackSlash(self.dec._RawData.GetNextLine(), [])
+ self.assertEqual(CommentList, [])
+ self.assertEqual(ConcatLine, 'test with backslash continue second line')
+
+ #
+ # Error cases, assert raise exception
+ #
+ self.assertRaises(FatalError, self.dec._TryBackSlash, self.dec._RawData.GetNextLine(), [])
+ self.assertRaises(FatalError, self.dec._TryBackSlash, self.dec._RawData.GetNextLine(), [])
+
+#
+# Test _DecBase.Parse function
+#
+class DataItem(_DecComments):
+ def __init__(self):
+ _DecComments.__init__(self)
+ self.String = ''
+
+class Data(_DecComments):
+ def __init__(self):
+ _DecComments.__init__(self)
+ # List of DataItem
+ self.ItemList = []
+
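+# TestInner and TestTop form a toy two-level parser built on _DecBase:
+# TestTop consumes the whole buffer and delegates each '[TOP]' section to a
+# TestInner, which collects each raw line as a DataItem until the next
+# '[TOP]' header stops it.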
+class TestInner(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ self.ItemObject = Data()
+
+ def _StopCurrentParsing(self, Line):
+ return Line == '[TOP]'
+
+ def _ParseItem(self):
+ Item = DataItem()
+ Item.String = self._RawData.CurrentLine
+ self.ItemObject.ItemList.append(Item)
+ return Item
+
+ def _TailCommentStrategy(self, Comment):
+ return Comment.find('@comment') != -1
+
+class TestTop(_DecBase):
+ def __init__(self, RawData):
+ _DecBase.__init__(self, RawData)
+ # List of Data
+ self.ItemObject = []
+
+ # Top parser
+ def _StopCurrentParsing(self, Line):
+ return False
+
+ def _ParseItem(self):
+ TestParser = TestInner(self._RawData)
+ TestParser.Parse()
+ self.ItemObject.append(TestParser.ItemObject)
+ return TestParser.ItemObject
+
+class ParseTestCase(unittest.TestCase):
+ def setUp(self):
+ pass
+
+ def testParse(self):
+ Content = \
+ '''# Top comment
+ [TOP]
+ # sub1 head comment
+ (test item has both head and tail comment) # sub1 tail comment
+ # sub2 head comment
+ (test item has head and special tail comment)
+ # @comment test TailCommentStrategy branch
+
+ (test item has no comment)
+
+ # test NextLine branch
+ [TOP]
+ sub-item
+ '''
+ dec = TestTop(FileContent('dummy', Content.splitlines()))
+ dec.Parse()
+
+ # Two sections
+ self.assertEqual(len(dec.ItemObject), 2)
+
+ data = dec.ItemObject[0]
+ self.assertEqual(data._HeadComment[0][0], '# Top comment')
+ self.assertEqual(data._HeadComment[0][1], 1)
+
+ # 3 subitems
+ self.assertEqual(len(data.ItemList), 3)
+
+ dataitem = data.ItemList[0]
+ self.assertEqual(dataitem.String, '(test item has both head and tail comment)')
+ # Comment content
+ self.assertEqual(dataitem._HeadComment[0][0], '# sub1 head comment')
+ self.assertEqual(dataitem._TailComment[0][0], '# sub1 tail comment')
+ # Comment line number
+ self.assertEqual(dataitem._HeadComment[0][1], 3)
+ self.assertEqual(dataitem._TailComment[0][1], 4)
+
+ dataitem = data.ItemList[1]
+ self.assertEqual(dataitem.String, '(test item has head and special tail comment)')
+ # Comment content
+ self.assertEqual(dataitem._HeadComment[0][0], '# sub2 head comment')
+ self.assertEqual(dataitem._TailComment[0][0], '# @comment test TailCommentStrategy branch')
+ # Comment line number
+ self.assertEqual(dataitem._HeadComment[0][1], 5)
+ self.assertEqual(dataitem._TailComment[0][1], 7)
+
+ dataitem = data.ItemList[2]
+ self.assertEqual(dataitem.String, '(test item has no comment)')
+ # Comment content
+ self.assertEqual(dataitem._HeadComment, [])
+ self.assertEqual(dataitem._TailComment, [])
+
+ data = dec.ItemObject[1]
+ self.assertEqual(data._HeadComment[0][0], '# test NextLine branch')
+ self.assertEqual(data._HeadComment[0][1], 11)
+
+        # 1 subitem
+ self.assertEqual(len(data.ItemList), 1)
+
+ dataitem = data.ItemList[0]
+ self.assertEqual(dataitem.String, 'sub-item')
+ self.assertEqual(dataitem._HeadComment, [])
+ self.assertEqual(dataitem._TailComment, [])
+
+#
+# Test _DecDefine._ParseItem
+#
+class DecDefineTestCase(unittest.TestCase):
+ def GetObj(self, Content):
+ Obj = _DecDefine(FileContent('dummy', Content.splitlines()))
+ Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
+ return Obj
+
+ def testDecDefine(self):
+ item = self.GetObj('PACKAGE_NAME = MdePkg')._ParseItem()
+ self.assertEqual(item.Key, 'PACKAGE_NAME')
+ self.assertEqual(item.Value, 'MdePkg')
+
+ def testDecDefine1(self):
+ obj = self.GetObj('PACKAGE_NAME')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testDecDefine2(self):
+ obj = self.GetObj('unknown_key = ')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testDecDefine3(self):
+ obj = self.GetObj('PACKAGE_NAME = ')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+#
+# Test _DecLibraryclass._ParseItem
+#
+class DecLibraryTestCase(unittest.TestCase):
+ def GetObj(self, Content):
+ Obj = _DecLibraryclass(FileContent('dummy', Content.splitlines()))
+ Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
+ return Obj
+
+ def testNoInc(self):
+ obj = self.GetObj('UefiRuntimeLib')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testEmpty(self):
+ obj = self.GetObj(' | ')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testLibclassNaming(self):
+ obj = self.GetObj('lowercase_efiRuntimeLib|Include/Library/UefiRuntimeLib.h')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testLibclassExt(self):
+ obj = self.GetObj('RuntimeLib|Include/Library/UefiRuntimeLib.no_h')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testLibclassRelative(self):
+ obj = self.GetObj('RuntimeLib|Include/../UefiRuntimeLib.h')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+#
+# Test _DecPcd._ParseItem
+#
+class DecPcdTestCase(unittest.TestCase):
+ def GetObj(self, Content):
+ Obj = _DecPcd(FileContent('dummy', Content.splitlines()))
+ Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
+ Obj._RawData.CurrentScope = [('PcdsFeatureFlag'.upper(), 'COMMON')]
+ return Obj
+
+ def testOK(self):
+ item = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d')._ParseItem()
+ self.assertEqual(item.TokenSpaceGuidCName, 'gEfiMdePkgTokenSpaceGuid')
+ self.assertEqual(item.TokenCName, 'PcdComponentNameDisable')
+ self.assertEqual(item.DefaultValue, 'FALSE')
+ self.assertEqual(item.DatumType, 'BOOLEAN')
+ self.assertEqual(item.TokenValue, '0x0000000d')
+
+ def testNoCvar(self):
+ obj = self.GetObj('123ai.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testSplit(self):
+ obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable FALSE|BOOLEAN|0x0000000d')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d | abc')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testUnknownType(self):
+ obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|unknown|0x0000000d')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testVoid(self):
+ obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|abc|VOID*|0x0000000d')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testUINT(self):
+ obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|0xabc|UINT8|0x0000000d')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+#
+# Test _DecInclude._ParseItem
+#
+class DecIncludeTestCase(unittest.TestCase):
+ #
+ # Test code to be added
+ #
+ pass
+
+#
+# Test _DecGuid._ParseItem
+#
+class DecGuidTestCase(unittest.TestCase):
+ def GetObj(self, Content):
+ Obj = _DecGuid(FileContent('dummy', Content.splitlines()))
+ Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
+ Obj._RawData.CurrentScope = [('guids'.upper(), 'COMMON')]
+ return Obj
+
+ def testCValue(self):
+ item = self.GetObj('gEfiIpSecProtocolGuid={ 0xdfb386f7, 0xe100, 0x43ad,'
+ ' {0x9c, 0x9a, 0xed, 0x90, 0xd0, 0x8a, 0x5e, 0x12 }}')._ParseItem()
+ self.assertEqual(item.GuidCName, 'gEfiIpSecProtocolGuid')
+ self.assertEqual(item.GuidCValue, '{ 0xdfb386f7, 0xe100, 0x43ad, {0x9c, 0x9a, 0xed, 0x90, 0xd0, 0x8a, 0x5e, 0x12 }}')
+
+ def testGuidString(self):
+ item = self.GetObj('gEfiIpSecProtocolGuid=1E73767F-8F52-4603-AEB4-F29B510B6766')._ParseItem()
+ self.assertEqual(item.GuidCName, 'gEfiIpSecProtocolGuid')
+ self.assertEqual(item.GuidCValue, '1E73767F-8F52-4603-AEB4-F29B510B6766')
+
+ def testNoValue1(self):
+ obj = self.GetObj('gEfiIpSecProtocolGuid')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testNoValue2(self):
+ obj = self.GetObj('gEfiIpSecProtocolGuid=')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+ def testNoName(self):
+ obj = self.GetObj('=')
+ self.assertRaises(FatalError, obj._ParseItem)
+
+#
+# Test Dec.__init__
+#
+class DecDecInitTestCase(unittest.TestCase):
+ def testNoDecFile(self):
+ self.assertRaises(FatalError, Dec, 'No_Such_File')
+
+class TmpFile:
+ def __init__(self, File):
+ self.File = File
+
+    def Write(self, Content):
+        try:
+            with open(self.File, 'w') as FileObj:
+                FileObj.write(Content)
+        except OSError:
+            # Ignore write failures; a missing file will surface as a test
+            # failure later anyway.
+            pass
+
+    def Remove(self):
+        try:
+            os.remove(self.File)
+        except OSError:
+            pass
+
+#
+# Test Dec._UserExtentionSectionParser
+#
+class DecUESectionTestCase(unittest.TestCase):
+ def setUp(self):
+ self.File = TmpFile('test.dec')
+ self.File.Write(
+'''[userextensions.intel."myid"]
+[userextensions.intel."myid".IA32]
+[userextensions.intel."myid".IA32,]
+[userextensions.intel."myid]
+'''
+ )
+
+ def tearDown(self):
+ self.File.Remove()
+
+ def testUserExtentionHeader(self):
+ dec = Dec('test.dec', False)
+
+ # OK: [userextensions.intel."myid"]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ dec._UserExtentionSectionParser()
+ self.assertEqual(len(dec._RawData.CurrentScope), 1)
+ self.assertEqual(dec._RawData.CurrentScope[0][0], 'userextensions'.upper())
+ self.assertEqual(dec._RawData.CurrentScope[0][1], 'intel')
+ self.assertEqual(dec._RawData.CurrentScope[0][2], '"myid"')
+ self.assertEqual(dec._RawData.CurrentScope[0][3], 'COMMON')
+
+ # OK: [userextensions.intel."myid".IA32]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ dec._UserExtentionSectionParser()
+ self.assertEqual(len(dec._RawData.CurrentScope), 1)
+ self.assertEqual(dec._RawData.CurrentScope[0][0], 'userextensions'.upper())
+ self.assertEqual(dec._RawData.CurrentScope[0][1], 'intel')
+ self.assertEqual(dec._RawData.CurrentScope[0][2], '"myid"')
+ self.assertEqual(dec._RawData.CurrentScope[0][3], 'IA32')
+
+ # Fail: [userextensions.intel."myid".IA32,]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._UserExtentionSectionParser)
+
+ # Fail: [userextensions.intel."myid]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._UserExtentionSectionParser)
+
+#
+# Test Dec._SectionHeaderParser
+#
+class DecSectionTestCase(unittest.TestCase):
+ def setUp(self):
+ self.File = TmpFile('test.dec')
+ self.File.Write(
+'''[no section start or end
+[,] # empty sub-section
+[unknow_section_name]
+[Includes.IA32.other] # no third one
+[PcdsFeatureFlag, PcdsFixedAtBuild] # feature flag PCD must not be in the same section of other types of PCD
+[Includes.IA32, Includes.IA32]
+[Includes, Includes.IA32] # common cannot be with other arch
+[Includes.IA32, PcdsFeatureFlag] # different section name
+''' )
+
+ def tearDown(self):
+ self.File.Remove()
+
+ def testSectionHeader(self):
+ dec = Dec('test.dec', False)
+ # [no section start or end
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+ #[,] # empty sub-section
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+ # [unknow_section_name]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+ # [Includes.IA32.other] # no third one
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+ # [PcdsFeatureFlag, PcdsFixedAtBuild]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+ # [Includes.IA32, Includes.IA32]
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ dec._SectionHeaderParser()
+ self.assertEqual(len(dec._RawData.CurrentScope), 1)
+ self.assertEqual(dec._RawData.CurrentScope[0][0], 'Includes'.upper())
+ self.assertEqual(dec._RawData.CurrentScope[0][1], 'IA32')
+
+ # [Includes, Includes.IA32] # common cannot be with other arch
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+ # [Includes.IA32, PcdsFeatureFlag] # different section name not allowed
+ dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
+ self.assertRaises(FatalError, dec._SectionHeaderParser)
+
+#
+# Test Dec.ParseDecComment
+#
+class DecDecCommentTestCase(unittest.TestCase):
+ def testDecHeadComment(self):
+ File = TmpFile('test.dec')
+ File.Write(
+ '''# abc
+ ##''')
+ dec = Dec('test.dec', False)
+ dec.ParseDecComment()
+ self.assertEqual(len(dec._HeadComment), 2)
+ self.assertEqual(dec._HeadComment[0][0], '# abc')
+ self.assertEqual(dec._HeadComment[0][1], 1)
+ self.assertEqual(dec._HeadComment[1][0], '##')
+ self.assertEqual(dec._HeadComment[1][1], 2)
+ File.Remove()
+
+ def testNoDoubleComment(self):
+ File = TmpFile('test.dec')
+ File.Write(
+ '''# abc
+ #
+ [section_start]''')
+ dec = Dec('test.dec', False)
+ dec.ParseDecComment()
+ self.assertEqual(len(dec._HeadComment), 2)
+ self.assertEqual(dec._HeadComment[0][0], '# abc')
+ self.assertEqual(dec._HeadComment[0][1], 1)
+ self.assertEqual(dec._HeadComment[1][0], '#')
+ self.assertEqual(dec._HeadComment[1][1], 2)
+ File.Remove()
+
+if __name__ == '__main__':
+ import Logger.Logger
+ Logger.Logger.Initialize()
+ unittest.main()
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py
new file mode 100755
index 00000000..7f000648
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py
@@ -0,0 +1,381 @@
+## @file
+# This file contains unit tests for the [Binaries] section handling of InfParser
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+from __future__ import print_function
+import os
+#import Object.Parser.InfObject as InfObject
+from Object.Parser.InfCommonObject import CurrentLine
+from Object.Parser.InfCommonObject import InfLineCommentObject
+from Object.Parser.InfBinaryObject import InfBinariesObject
+import Logger.Log as Logger
+import Library.GlobalData as Global
+##
+# Test Common binary item
+#
+
+#-------------start of common binary item test input--------------------------#
+
+#
+# Has only 1 element: the binary item Type
+#
+SectionStringsCommonItem1 = \
+"""
+GUID
+"""
+#
+# Has 2 elements: binary item Type and FileName
+#
+SectionStringsCommonItem2 = \
+"""
+GUID | Test/Test.guid
+"""
+
+#
+# Has 3 elements: Type | FileName | Target
+#
+SectionStringsCommonItem3 = \
+"""
+GUID | Test/Test.guid | DEBUG
+"""
+
+#
+# Has 3 elements: Type | FileName | Target;
+# Target uses a MACRO defined in the [Defines] section
+#
+SectionStringsCommonItem4 = \
+"""
+GUID | Test/Test.guid | $(TARGET)
+"""
+
+#
+# Has 3 elements: Type | FileName | Target;
+# FileName uses a MACRO defined in the [Binaries] section itself
+#
+SectionStringsCommonItem5 = \
+"""
+DEFINE BINARY_FILE_PATH = Test
+GUID | $(BINARY_FILE_PATH)/Test.guid | $(TARGET)
+"""
+
+#
+# Has 4 elements: Type | FileName | Target | Family
+#
+SectionStringsCommonItem6 = \
+"""
+GUID | Test/Test.guid | DEBUG | *
+"""
+
+#
+# Has 4 elements: Type | FileName | Target | Family
+#
+SectionStringsCommonItem7 = \
+"""
+GUID | Test/Test.guid | DEBUG | MSFT
+"""
+
+#
+# Has 5 elements: Type | FileName | Target | Family | TagName
+#
+SectionStringsCommonItem8 = \
+"""
+GUID | Test/Test.guid | DEBUG | MSFT | TEST
+"""
+
+#
+# Has 6 elements: Type | FileName | Target | Family | TagName | FFE
+#
+SectionStringsCommonItem9 = \
+"""
+GUID | Test/Test.guid | DEBUG | MSFT | TEST | TRUE
+"""
+
+#
+# Has 7 elements: Type | FileName | Target | Family | TagName | FFE | Overflow;
+# tests the invalid (too many fields) format
+#
+SectionStringsCommonItem10 = \
+"""
+GUID | Test/Test.guid | DEBUG | MSFT | TEST | TRUE | OVERFLOW
+"""
+
+#-------------end of common binary item test input----------------------------#
+
+
+
+#-------------start of VER type binary item test input------------------------#
+
+#
+# Has 1 element: invalid format
+#
+SectionStringsVerItem1 = \
+"""
+VER
+"""
+#
+# Has 5 elements: invalid format (the maximum is 4)
+#
+SectionStringsVerItem2 = \
+"""
+VER | Test/Test.ver | * | TRUE | OverFlow
+"""
+
+#
+# Have 2 elements, Type | FileName
+#
+SectionStringsVerItem3 = \
+"""
+VER | Test/Test.ver
+"""
+
+#
+# Have 3 elements, Type | FileName | Target
+#
+SectionStringsVerItem4 = \
+"""
+VER | Test/Test.ver | DEBUG
+"""
+
+#
+# Have 4 elements, Type | FileName | Target | FeatureFlagExp
+#
+SectionStringsVerItem5 = \
+"""
+VER | Test/Test.ver | DEBUG | TRUE
+"""
+
+#
+# Two VER items, both enabled by their FeatureFlagExp
+#
+SectionStringsVerItem6 = \
+"""
+VER | Test/Test.ver | * | TRUE
+VER | Test/Test2.ver | * | TRUE
+"""
+
+
+#
+# Two VER items, only one enabled by its FeatureFlagExp
+#
+SectionStringsVerItem7 = \
+"""
+VER | Test/Test.ver | * | TRUE
+VER | Test/Test2.ver | * | FALSE
+"""
+
+#-------------end of VER type binary item test input--------------------------#
+
+
+#-------------start of UI type binary item test input-------------------------#
+
+#
+# Test that only one UI item may exist
+#
+SectionStringsUiItem1 = \
+"""
+UI | Test/Test.ui | * | TRUE
+UI | Test/Test2.ui | * | TRUE
+"""
+
+SectionStringsUiItem2 = \
+"""
+UI | Test/Test.ui | * | TRUE
+SEC_UI | Test/Test2.ui | * | TRUE
+"""
+
+SectionStringsUiItem3 = \
+"""
+UI | Test/Test.ui | * | TRUE
+UI | Test/Test2.ui | * | FALSE
+"""
+
+#
+# Has 1 element: invalid format
+#
+SectionStringsUiItem4 = \
+"""
+UI
+"""
+#
+# Has 5 elements: invalid format (the maximum is 4)
+#
+SectionStringsUiItem5 = \
+"""
+UI | Test/Test.ui | * | TRUE | OverFlow
+"""
+
+#
+# Have 2 elements, Type | FileName
+#
+SectionStringsUiItem6 = \
+"""
+UI | Test/Test.ui
+"""
+
+#
+# Have 3 elements, Type | FileName | Target
+#
+SectionStringsUiItem7 = \
+"""
+UI | Test/Test.ui | DEBUG
+"""
+
+#
+# Have 4 elements, Type | FileName | Target | FeatureFlagExp
+#
+SectionStringsUiItem8 = \
+"""
+UI | Test/Test.ui | DEBUG | TRUE
+"""
+#---------------end of UI type binary item test input-------------------------#
+
+
+gFileName = "BinarySectionTest.inf"
+
+##
+# Construct a SectionString suitable for passing to the section parser.
+#
+def StringToSectionString(String):
+ Lines = String.split('\n')
+ LineNo = 0
+ SectionString = []
+ for Line in Lines:
+ if Line.strip() == '':
+ continue
+ SectionString.append((Line, LineNo, ''))
+ LineNo = LineNo + 1
+
+ return SectionString
+
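+# For illustration, a hypothetical call (input made up for this sketch):
+#
+#   StringToSectionString('\nGUID | Test/Test.guid\n')
+#   -> [('GUID | Test/Test.guid', 0, '')]
+#
+# Blank lines are dropped, so the line numbers count only non-empty lines.
+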
+def PrepareTest(String):
+ SectionString = StringToSectionString(String)
+ ItemList = []
+ for Item in SectionString:
+ ValueList = Item[0].split('|')
+ for count in range(len(ValueList)):
+ ValueList[count] = ValueList[count].strip()
+ if len(ValueList) >= 2:
+ #
+ # Create a temp file for test.
+ #
+            FileName = os.path.normpath(os.path.realpath(ValueList[1].strip()))
+            try:
+                # Touch an empty file so the binary file existence check can
+                # pass during the test.
+                with open(FileName, "w"):
+                    pass
+            except OSError:
+                print("File Create Error")
+        # Bind to fresh local names here: assigning back to 'CurrentLine' or
+        # 'InfLineCommentObject' would shadow the imported classes and break
+        # every iteration after the first.
+        LineObj = CurrentLine()
+        LineObj.SetFileName("Test")
+        LineObj.SetLineString(Item[0])
+        LineObj.SetLineNo(Item[1])
+        CommentObj = InfLineCommentObject()
+
+        ItemList.append((ValueList, CommentObj, LineObj))
+
+ return ItemList
+
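+# For illustration: each entry returned by PrepareTest is a 3-tuple
+#   (ValueList, comment object, line object),
+# e.g. (['GUID', 'Test/Test.guid'], <InfLineCommentObject>, <CurrentLine>)
+# with the CurrentLine carrying the file name "Test", the raw line string
+# and its line number.
+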
+if __name__ == '__main__':
+ Logger.Initialize()
+
+ InfBinariesInstance = InfBinariesObject()
+ ArchList = ['COMMON']
+ Global.gINF_MODULE_DIR = os.getcwd()
+
+ AllPassedFlag = True
+
+ #
+ # For All Ui test
+ #
+ UiStringList = [
+ SectionStringsUiItem1,
+ SectionStringsUiItem2,
+ SectionStringsUiItem3,
+ SectionStringsUiItem4,
+ SectionStringsUiItem5,
+ SectionStringsUiItem6,
+ SectionStringsUiItem7,
+ SectionStringsUiItem8
+ ]
+
+ for Item in UiStringList:
+ Ui = PrepareTest(Item)
+ if Item == SectionStringsUiItem4 or Item == SectionStringsUiItem5:
+ try:
+ InfBinariesInstance.SetBinary(Ui = Ui, ArchList = ArchList)
+ except Logger.FatalError:
+ pass
+ else:
+ try:
+ InfBinariesInstance.SetBinary(Ui = Ui, ArchList = ArchList)
+ except:
+ AllPassedFlag = False
+
+ #
+ # For All Ver Test
+ #
+ VerStringList = [
+ SectionStringsVerItem1,
+ SectionStringsVerItem2,
+ SectionStringsVerItem3,
+ SectionStringsVerItem4,
+ SectionStringsVerItem5,
+ SectionStringsVerItem6,
+ SectionStringsVerItem7
+ ]
+ for Item in VerStringList:
+ Ver = PrepareTest(Item)
+ if Item == SectionStringsVerItem1 or \
+ Item == SectionStringsVerItem2:
+
+ try:
+ InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
+ except:
+ pass
+
+ else:
+ try:
+ InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
+ except:
+ AllPassedFlag = False
+
+ #
+ # For All Common Test
+ #
+ CommonStringList = [
+ SectionStringsCommonItem1,
+ SectionStringsCommonItem2,
+ SectionStringsCommonItem3,
+ SectionStringsCommonItem4,
+ SectionStringsCommonItem5,
+ SectionStringsCommonItem6,
+ SectionStringsCommonItem7,
+ SectionStringsCommonItem8,
+ SectionStringsCommonItem9,
+ SectionStringsCommonItem10
+ ]
+
+ for Item in CommonStringList:
+ CommonBin = PrepareTest(Item)
+ if Item == SectionStringsCommonItem10 or \
+ Item == SectionStringsCommonItem1:
+
+ try:
+ InfBinariesInstance.SetBinary(CommonBinary = CommonBin, ArchList = ArchList)
+ except:
+ pass
+
+ else:
+ try:
+                InfBinariesInstance.SetBinary(CommonBinary = CommonBin, ArchList = ArchList)
+ except:
+ print("Test Failed!")
+ AllPassedFlag = False
+
+    if AllPassedFlag:
+ print('All tests passed...')
+ else:
+ print('Some unit test failed!')
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/CommonXml.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/CommonXml.py
new file mode 100755
index 00000000..8c227442
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/CommonXml.py
@@ -0,0 +1,997 @@
+## @file
+# This file is used to parse the common XML parts of a .PKG (distribution package) file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+CommonXml
+'''
+
+##
+# Import Modules
+#
+
+from Core.DistributionPackageClass import DistributionPackageHeaderObject
+from Library.StringUtils import ConvertNEToNOTEQ
+from Library.StringUtils import ConvertNOTEQToNE
+from Library.StringUtils import GetSplitValueList
+from Library.StringUtils import GetStringOfList
+from Library.Xml.XmlRoutines import XmlElement
+from Library.Xml.XmlRoutines import XmlElement2
+from Library.Xml.XmlRoutines import XmlAttribute
+from Library.Xml.XmlRoutines import XmlNode
+from Library.Xml.XmlRoutines import XmlList
+from Library.Xml.XmlRoutines import CreateXmlElement
+from Library.UniClassObject import ConvertSpecialUnicodes
+from Library.UniClassObject import GetLanguageCode1766
+from Object.POM.CommonObject import FileObject
+from Object.POM.CommonObject import MiscFileObject
+from Object.POM.CommonObject import UserExtensionObject
+from Object.POM.CommonObject import ClonedRecordObject
+from Object.POM.CommonObject import LibraryClassObject
+from Object.POM.CommonObject import FileNameObject
+from Object.POM.ModuleObject import ModuleObject
+from Xml.XmlParserMisc import IsRequiredItemListNull
+from Xml.XmlParserMisc import GetHelpTextList
+import Library.DataType as DataType
+
+##
+# ClonedFromXml
+#
+class ClonedFromXml(object):
+ def __init__(self):
+ self.GUID = ''
+ self.Version = ''
+
+ def FromXml(self, Item, Key):
+ self.GUID = XmlElement(Item, '%s/GUID' % Key)
+ self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
+ if self.GUID == '' and self.Version == '':
+ return None
+ ClonedFrom = ClonedRecordObject()
+ ClonedFrom.SetPackageGuid(self.GUID)
+ ClonedFrom.SetPackageVersion(self.Version)
+ return ClonedFrom
+
+ def ToXml(self, ClonedFrom, Key):
+        if self.GUID:
+            # Deliberate no-op reference to self; a common UPT idiom to keep
+            # style checkers quiet.
+            pass
+ Element1 = CreateXmlElement('GUID', ClonedFrom.GetPackageGuid(), [],
+ [['Version', ClonedFrom.GetPackageVersion()]])
+ AttributeList = []
+ NodeList = [Element1]
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def __str__(self):
+ return "GUID = %s Version = %s" % (self.GUID, self.Version)
+
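+# For illustration, the XML shape consumed by ClonedFromXml.FromXml above,
+# assuming Key is 'ClonedFrom' (the GUID value here is made up):
+#
+#   <ClonedFrom>
+#     <GUID Version="1.0">11111111-2222-3333-4444-555555555555</GUID>
+#   </ClonedFrom>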
+
+##
+# CommonDefinesXml
+#
+class CommonDefinesXml(object):
+ def __init__(self):
+ self.Usage = ''
+ self.SupArchList = []
+ self.SupModList = []
+ self.FeatureFlag = ''
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ self.Usage = XmlAttribute(Item, 'Usage')
+ self.SupArchList = \
+ [Arch for Arch in GetSplitValueList(XmlAttribute(Item, 'SupArchList'), DataType.TAB_SPACE_SPLIT) if Arch]
+ self.SupModList = \
+ [Mod for Mod in GetSplitValueList(XmlAttribute(Item, 'SupModList'), DataType.TAB_SPACE_SPLIT) if Mod]
+ self.FeatureFlag = ConvertNOTEQToNE(XmlAttribute(Item, 'FeatureFlag'))
+
+ def ToXml(self):
+ pass
+
+ def __str__(self):
+ return "Usage = %s SupArchList = %s SupModList = %s FeatureFlag = %s" \
+ % (self.Usage, self.SupArchList, self.SupModList, self.FeatureFlag)
+
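+# For illustration, CommonDefinesXml reads attributes only; a hypothetical
+# element such as
+#
+#   <SomeItem Usage="CONSUMES" SupArchList="IA32 X64" SupModList="BASE">
+#
+# yields Usage='CONSUMES', SupArchList=['IA32', 'X64'], SupModList=['BASE'],
+# with FeatureFlag normalized through ConvertNOTEQToNE.
+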
+##
+# PromptXml
+#
+class PromptXml(object):
+ def __init__(self):
+ self.Prompt = ''
+ self.Lang = ''
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ self.Prompt = XmlElement2(Item, 'Prompt')
+ self.Lang = XmlAttribute(Item, 'Lang')
+
+ def ToXml(self, Prompt, Key='Prompt'):
+ if self.Prompt:
+ pass
+ return CreateXmlElement('%s' % Key, Prompt.GetString(), [], [['Lang', Prompt.GetLang()]])
+ def __str__(self):
+ return "Prompt = %s Lang = %s" % (self.Prompt, self.Lang)
+
+##
+# HelpTextXml
+#
+class HelpTextXml(object):
+ def __init__(self):
+ self.HelpText = ''
+ self.Lang = ''
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ self.HelpText = XmlElement2(Item, 'HelpText')
+ self.Lang = XmlAttribute(Item, 'Lang')
+
+ def ToXml(self, HelpText, Key='HelpText'):
+ if self.HelpText:
+ pass
+ return CreateXmlElement('%s' % Key, HelpText.GetString(), [], [['Lang', HelpText.GetLang()]])
+ def __str__(self):
+ return "HelpText = %s Lang = %s" % (self.HelpText, self.Lang)
+
+##
+# HeaderXml
+#
+class HeaderXml(object):
+ def __init__(self):
+ self.Name = ''
+ self.BaseName = ''
+ self.GUID = ''
+ self.Version = ''
+ self.CopyrightList = []
+ self.LicenseList = []
+ self.AbstractList = []
+ self.DescriptionList = []
+
+ def FromXml(self, Item, Key, IsRequiredCheck=False, IsStandAlongModule=False):
+ if not Item and IsRequiredCheck:
+ XmlTreeLevel = []
+ if IsStandAlongModule:
+ XmlTreeLevel = ['DistributionPackage', 'ModuleSurfaceArea']
+ else:
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ModuleSurfaceArea']
+ CheckDict = {'Header':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ self.Name = XmlElement(Item, '%s/Name' % Key)
+ self.BaseName = XmlAttribute(XmlNode(Item, '%s/Name' % Key), 'BaseName')
+ self.GUID = XmlElement(Item, '%s/GUID' % Key)
+ self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
+
+ for SubItem in XmlList(Item, '%s/Abstract' % Key):
+ HeaderAbstractLang = XmlAttribute(SubItem, 'Lang')
+ self.AbstractList.append((HeaderAbstractLang, XmlElement(SubItem, '%s/Abstract' % Key)))
+ for SubItem in XmlList(Item, '%s/Description' % Key):
+ HeaderDescriptionLang = XmlAttribute(SubItem, 'Lang')
+ self.DescriptionList.append((HeaderDescriptionLang, XmlElement(SubItem, '%s/Description' % Key)))
+ for SubItem in XmlList(Item, '%s/Copyright' % Key):
+ HeaderCopyrightLang = XmlAttribute(SubItem, 'Lang')
+ self.CopyrightList.append((HeaderCopyrightLang, XmlElement(SubItem, '%s/Copyright' % Key)))
+ for SubItem in XmlList(Item, '%s/License' % Key):
+ HeaderLicenseLang = XmlAttribute(SubItem, 'Lang')
+ self.LicenseList.append((HeaderLicenseLang, XmlElement(SubItem, '%s/License' % Key)))
+ ModuleHeader = ModuleObject()
+ ModuleHeader.SetName(self.Name)
+ ModuleHeader.SetBaseName(self.BaseName)
+ ModuleHeader.SetGuid(self.GUID)
+ ModuleHeader.SetVersion(self.Version)
+ ModuleHeader.SetCopyright(self.CopyrightList)
+ ModuleHeader.SetLicense(self.LicenseList)
+ ModuleHeader.SetAbstract(self.AbstractList)
+ ModuleHeader.SetDescription(self.DescriptionList)
+ return ModuleHeader
+
+ def ToXml(self, Header, Key):
+ if self.GUID:
+ pass
+ Element1 = CreateXmlElement('Name', Header.GetName(), [], [['BaseName', Header.GetBaseName()]])
+ Element2 = CreateXmlElement('GUID', Header.GetGuid(), [], [['Version', Header.GetVersion()]])
+ NodeList = [Element1,
+ Element2,
+ ]
+
+ UNIInfAbstractList = []
+ UNIInfDescriptionList = []
+ # Get Abstract and Description from Uni File
+ # if the Uni File exists
+ if Header.UniFileClassObject is not None:
+ UniStrDict = Header.UniFileClassObject.OrderedStringList
+ for Lang in UniStrDict:
+ for StringDefClassObject in UniStrDict[Lang]:
+ if not StringDefClassObject.StringValue:
+ continue
+ if StringDefClassObject.StringName == DataType.TAB_INF_ABSTRACT:
+ UNIInfAbstractList.append((GetLanguageCode1766(Lang),
+ ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+
+ if StringDefClassObject.StringName == DataType.TAB_INF_DESCRIPTION:
+ UNIInfDescriptionList.append((GetLanguageCode1766(Lang),
+ ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+
+ # Get Abstract and Description from INF File Header
+ for (Lang, Value) in Header.GetCopyright():
+ if Value:
+ NodeList.append(CreateXmlElement('Copyright', Value, [], []))
+ for (Lang, Value) in Header.GetLicense():
+ if Value:
+ NodeList.append(CreateXmlElement('License', Value, [], []))
+ for (Lang, Value) in Header.GetAbstract() + UNIInfAbstractList:
+ if Value:
+ NodeList.append(CreateXmlElement('Abstract', Value, [], [['Lang', Lang]]))
+ for (Lang, Value) in Header.GetDescription() + UNIInfDescriptionList:
+ if Value:
+ NodeList.append(CreateXmlElement('Description', Value, [], [['Lang', Lang]]))
+
+ AttributeList = []
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def __str__(self):
+ return "Name = %s BaseName = %s GUID = %s Version = %s Copyright = %s \
+ License = %s Abstract = %s Description = %s" % \
+ (self.Name, self.BaseName, self.GUID, self.Version, self.CopyrightList, \
+ self.LicenseList, self.AbstractList, self.DescriptionList)
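+
+# For illustration, a hypothetical fragment matched by HeaderXml.FromXml,
+# assuming Key is 'Header' (names, version and GUID are made up):
+#
+#   <Header>
+#     <Name BaseName="MdePkg">Mde Package</Name>
+#     <GUID Version="1.08">1E73767F-8F52-4603-AEB4-F29B510B6766</GUID>
+#     <Abstract Lang="en-us">Base package</Abstract>
+#   </Header>
+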
+##
+# DistributionPackageHeaderXml
+#
+class DistributionPackageHeaderXml(object):
+ def __init__(self):
+ self.Header = HeaderXml()
+ self.ReadOnly = ''
+ self.RePackage = ''
+ self.Vendor = ''
+ self.Date = ''
+ self.Signature = ''
+ self.XmlSpecification = ''
+
+ def FromXml(self, Item, Key):
+ if not Item:
+ return None
+ self.ReadOnly = XmlAttribute(XmlNode(Item, '%s' % Key), 'ReadOnly')
+ self.RePackage = XmlAttribute(XmlNode(Item, '%s' % Key), 'RePackage')
+ self.Vendor = XmlElement(Item, '%s/Vendor' % Key)
+ self.Date = XmlElement(Item, '%s/Date' % Key)
+ self.Signature = XmlElement(Item, '%s/Signature' % Key)
+ self.XmlSpecification = XmlElement(Item, '%s/XmlSpecification' % Key)
+ self.Header.FromXml(Item, Key)
+ DistributionPackageHeader = DistributionPackageHeaderObject()
+ if self.ReadOnly.upper() == 'TRUE':
+ DistributionPackageHeader.ReadOnly = True
+ elif self.ReadOnly.upper() == 'FALSE':
+ DistributionPackageHeader.ReadOnly = False
+ if self.RePackage.upper() == 'TRUE':
+ DistributionPackageHeader.RePackage = True
+ elif self.RePackage.upper() == 'FALSE':
+ DistributionPackageHeader.RePackage = False
+ DistributionPackageHeader.Vendor = self.Vendor
+ DistributionPackageHeader.Date = self.Date
+ DistributionPackageHeader.Signature = self.Signature
+ DistributionPackageHeader.XmlSpecification = self.XmlSpecification
+ DistributionPackageHeader.SetName(self.Header.Name)
+ DistributionPackageHeader.SetBaseName(self.Header.BaseName)
+ DistributionPackageHeader.SetGuid(self.Header.GUID)
+ DistributionPackageHeader.SetVersion(self.Header.Version)
+ DistributionPackageHeader.SetCopyright(self.Header.CopyrightList)
+ DistributionPackageHeader.SetLicense(self.Header.LicenseList)
+ DistributionPackageHeader.SetAbstract(self.Header.AbstractList)
+ DistributionPackageHeader.SetDescription(self.Header.DescriptionList)
+ return DistributionPackageHeader
+
+ def ToXml(self, DistributionPackageHeader, Key):
+ if self.Header:
+ pass
+ Element1 = CreateXmlElement('Name', \
+ DistributionPackageHeader.GetName(), [], \
+ [['BaseName', \
+ DistributionPackageHeader.GetBaseName()]])
+ Element2 = CreateXmlElement('GUID', \
+ DistributionPackageHeader.GetGuid(), [], \
+ [['Version', \
+ DistributionPackageHeader.GetVersion()]])
+ AttributeList = []
+ if DistributionPackageHeader.ReadOnly != '':
+ AttributeList.append(['ReadOnly', str(DistributionPackageHeader.ReadOnly).lower()])
+ if DistributionPackageHeader.RePackage != '':
+ AttributeList.append(['RePackage', str(DistributionPackageHeader.RePackage).lower()])
+ if DistributionPackageHeader.GetAbstract():
+ DPAbstract = DistributionPackageHeader.GetAbstract()[0][1]
+ else:
+ DPAbstract = ''
+ if DistributionPackageHeader.GetDescription():
+ DPDescription = DistributionPackageHeader.GetDescription()[0][1]
+ else:
+ DPDescription = ''
+ if DistributionPackageHeader.GetCopyright():
+ DPCopyright = DistributionPackageHeader.GetCopyright()[0][1]
+ else:
+ DPCopyright = ''
+ if DistributionPackageHeader.GetLicense():
+ DPLicense = DistributionPackageHeader.GetLicense()[0][1]
+ else:
+ DPLicense = ''
+ NodeList = [Element1,
+ Element2,
+ ['Vendor', DistributionPackageHeader.Vendor],
+ ['Date', DistributionPackageHeader.Date],
+ ['Copyright', DPCopyright],
+ ['License', DPLicense],
+ ['Abstract', DPAbstract],
+ ['Description', DPDescription],
+ ['Signature', DistributionPackageHeader.Signature],
+ ['XmlSpecification', \
+ DistributionPackageHeader.XmlSpecification],
+ ]
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def __str__(self):
+ return "ReadOnly = %s RePackage = %s Vendor = %s Date = %s \
+ Signature = %s XmlSpecification = %s %s" % \
+ (self.ReadOnly, self.RePackage, self.Vendor, self.Date, \
+ self.Signature, self.XmlSpecification, self.Header)
+##
+# PackageHeaderXml
+#
+class PackageHeaderXml(object):
+ def __init__(self):
+ self.Header = HeaderXml()
+ self.PackagePath = ''
+
+ def FromXml(self, Item, Key, PackageObject2):
+ if not Item:
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea']
+ CheckDict = {'PackageHeader': None, }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ self.PackagePath = XmlElement(Item, '%s/PackagePath' % Key)
+ self.Header.FromXml(Item, Key)
+ PackageObject2.SetName(self.Header.Name)
+ PackageObject2.SetBaseName(self.Header.BaseName)
+ PackageObject2.SetGuid(self.Header.GUID)
+ PackageObject2.SetVersion(self.Header.Version)
+ PackageObject2.SetCopyright(self.Header.CopyrightList)
+ PackageObject2.SetLicense(self.Header.LicenseList)
+ PackageObject2.SetAbstract(self.Header.AbstractList)
+ PackageObject2.SetDescription(self.Header.DescriptionList)
+ PackageObject2.SetPackagePath(self.PackagePath)
+
+ def ToXml(self, PackageObject2, Key):
+ if self.PackagePath:
+ pass
+ Element1 = CreateXmlElement('Name', PackageObject2.GetName(), [], \
+ [['BaseName', PackageObject2.GetBaseName()]])
+ Element2 = CreateXmlElement('GUID', PackageObject2.GetGuid(), [], \
+ [['Version', PackageObject2.GetVersion()]])
+ NodeList = [Element1,
+ Element2
+ ]
+
+        UNIPackageAbstractList = []
+ UNIPackageDescriptionList = []
+ # Get Abstract and Description from Uni File
+ # if the Uni File exists
+ if PackageObject2.UniFileClassObject is not None:
+ UniStrDict = PackageObject2.UniFileClassObject.OrderedStringList
+ for Lang in UniStrDict:
+ for StringDefClassObject in UniStrDict[Lang]:
+ if not StringDefClassObject.StringValue:
+ continue
+ if StringDefClassObject.StringName == DataType.TAB_DEC_PACKAGE_ABSTRACT:
+                        UNIPackageAbstractList.append((GetLanguageCode1766(Lang),
+                                                       ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+
+ if StringDefClassObject.StringName == DataType.TAB_DEC_PACKAGE_DESCRIPTION:
+ UNIPackageDescriptionList.append((GetLanguageCode1766(Lang),
+ ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
+
+ # Get Abstract and Description from DEC File Header
+ for (Lang, Value) in PackageObject2.GetCopyright():
+ if Value:
+ NodeList.append(CreateXmlElement(DataType.TAB_HEADER_COPYRIGHT, Value, [], []))
+ for (Lang, Value) in PackageObject2.GetLicense():
+ if Value:
+ NodeList.append(CreateXmlElement(DataType.TAB_HEADER_LICENSE, Value, [], []))
+        for (Lang, Value) in PackageObject2.GetAbstract() + UNIPackageAbstractList:
+ if Value:
+ NodeList.append(CreateXmlElement(DataType.TAB_HEADER_ABSTRACT, Value, [], [['Lang', Lang]]))
+ for (Lang, Value) in PackageObject2.GetDescription() + UNIPackageDescriptionList:
+ if Value:
+ NodeList.append(CreateXmlElement(DataType.TAB_HEADER_DESCRIPTION, Value, [], [['Lang', Lang]]))
+
+
+ NodeList.append(['PackagePath', PackageObject2.GetPackagePath()])
+ AttributeList = []
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def __str__(self):
+ return "PackagePath = %s %s" \
+ % (self.PackagePath, self.Header)
+
+##
+# MiscellaneousFileXml
+#
+class MiscellaneousFileXml(object):
+ def __init__(self):
+ self.Header = HeaderXml()
+ self.Files = []
+ ##
+ # This API is used for Package or Module's MiscellaneousFile section
+ #
+ def FromXml(self, Item, Key):
+ if not Item:
+ return None
+ self.Header.FromXml(Item, Key)
+ NewItem = XmlNode(Item, '%s/Header' % Key)
+ self.Header.FromXml(NewItem, 'Header')
+ for SubItem in XmlList(Item, '%s/Filename' % Key):
+ Filename = XmlElement(SubItem, '%s/Filename' % Key)
+ Executable = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
+ if Executable.upper() == "TRUE":
+ Executable = True
+ elif Executable.upper() == "FALSE":
+ Executable = False
+ else:
+ Executable = ''
+ self.Files.append([Filename, Executable])
+ MiscFile = MiscFileObject()
+ MiscFile.SetCopyright(self.Header.CopyrightList)
+ MiscFile.SetLicense(self.Header.LicenseList)
+ MiscFile.SetAbstract(self.Header.AbstractList)
+ MiscFile.SetDescription(self.Header.DescriptionList)
+ MiscFileList = []
+ for File in self.Files:
+ FileObj = FileObject()
+ FileObj.SetURI(File[0])
+ FileObj.SetExecutable(File[1])
+ MiscFileList.append(FileObj)
+ MiscFile.SetFileList(MiscFileList)
+ return MiscFile
+ ##
+ # This API is used for DistP's tool section
+ #
+ def FromXml2(self, Item, Key):
+ if Item is None:
+ return None
+ NewItem = XmlNode(Item, '%s/Header' % Key)
+ self.Header.FromXml(NewItem, 'Header')
+ for SubItem in XmlList(Item, '%s/Filename' % Key):
+ Filename = XmlElement(SubItem, '%s/Filename' % Key)
+ Executable = \
+ XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
+ OsType = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'OS')
+ if Executable.upper() == "TRUE":
+ Executable = True
+ elif Executable.upper() == "FALSE":
+ Executable = False
+ else:
+ Executable = ''
+ self.Files.append([Filename, Executable, OsType])
+ MiscFile = MiscFileObject()
+ MiscFile.SetName(self.Header.Name)
+ MiscFile.SetCopyright(self.Header.CopyrightList)
+ MiscFile.SetLicense(self.Header.LicenseList)
+ MiscFile.SetAbstract(self.Header.AbstractList)
+ MiscFile.SetDescription(self.Header.DescriptionList)
+ MiscFileList = []
+ for File in self.Files:
+ FileObj = FileObject()
+ FileObj.SetURI(File[0])
+ FileObj.SetExecutable(File[1])
+ FileObj.SetOS(File[2])
+ MiscFileList.append(FileObj)
+ MiscFile.SetFileList(MiscFileList)
+ return MiscFile
+
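+    # For illustration, a hypothetical tool entry accepted by FromXml2; the
+    # 'Executable' and 'OS' attributes are read off the Filename node:
+    #
+    #   <Filename Executable="true" OS="Win32">Tools/MyTool.exe</Filename>
+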
+ ##
+ # This API is used for Package or Module's MiscellaneousFile section
+ #
+ def ToXml(self, MiscFile, Key):
+ if self.Header:
+ pass
+ if MiscFile:
+ if MiscFile.GetAbstract():
+ DPAbstract = MiscFile.GetAbstract()[0][1]
+ else:
+ DPAbstract = ''
+ if MiscFile.GetDescription():
+ DPDescription = MiscFile.GetDescription()[0][1]
+ else:
+ DPDescription = ''
+ if MiscFile.GetCopyright():
+ DPCopyright = MiscFile.GetCopyright()[0][1]
+ else:
+ DPCopyright = ''
+ if MiscFile.GetLicense():
+ DPLicense = MiscFile.GetLicense()[0][1]
+ else:
+ DPLicense = ''
+ NodeList = [['Copyright', DPCopyright],
+ ['License', DPLicense],
+ ['Abstract', DPAbstract],
+ ['Description', DPDescription],
+ ]
+ for File in MiscFile.GetFileList():
+ NodeList.append\
+ (CreateXmlElement\
+ ('Filename', File.GetURI(), [], \
+ [['Executable', str(File.GetExecutable()).lower()]]))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, [])
+ return Root
+ ##
+ # This API is used for DistP's tool section
+ #
+ def ToXml2(self, MiscFile, Key):
+ if self.Header:
+ pass
+ if MiscFile:
+ if MiscFile.GetAbstract():
+ DPAbstract = MiscFile.GetAbstract()[0][1]
+ else:
+ DPAbstract = ''
+ if MiscFile.GetDescription():
+ DPDescription = MiscFile.GetDescription()[0][1]
+ else:
+ DPDescription = ''
+ if MiscFile.GetCopyright():
+ DPCopyright = MiscFile.GetCopyright()[0][1]
+ else:
+ DPCopyright = ''
+ if MiscFile.GetLicense():
+ DPLicense = MiscFile.GetLicense()[0][1]
+ else:
+ DPLicense = ''
+ NodeList = [['Name', MiscFile.GetName()],
+ ['Copyright', DPCopyright],
+ ['License', DPLicense],
+ ['Abstract', DPAbstract],
+ ['Description', DPDescription],
+ ]
+ HeaderNode = CreateXmlElement('Header', '', NodeList, [])
+ NodeList = [HeaderNode]
+ for File in MiscFile.GetFileList():
+ NodeList.append\
+ (CreateXmlElement\
+ ('Filename', File.GetURI(), [], \
+ [['Executable', str(File.GetExecutable()).lower()], \
+ ['OS', File.GetOS()]]))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, [])
+ return Root
+
+ def __str__(self):
+ Str = str(self.Header)
+ for Item in self.Files:
+ Str = Str + '\n\tFilename:' + str(Item)
+ return Str
+##
+# UserExtensionsXml
+#
+class UserExtensionsXml(object):
+ def __init__(self):
+ self.UserId = ''
+ self.Identifier = ''
+ self.BinaryAbstractList = []
+ self.BinaryDescriptionList = []
+ self.BinaryCopyrightList = []
+ self.BinaryLicenseList = []
+ self.LangDefsList = []
+ self.DefineDict = {}
+ self.BuildOptionDict = {}
+ self.IncludesDict = {}
+ self.SourcesDict = {}
+ self.BinariesDict = {}
+ self.SupArchList = []
+ self.Statement = ''
+ self.Defines = ''
+ self.BuildOptions = ''
+
+ def FromXml2(self, Item, Key):
+ self.UserId = XmlAttribute(XmlNode(Item, '%s' % Key), 'UserId')
+ self.Identifier = XmlAttribute(XmlNode(Item, '%s' % Key), 'Identifier')
+ UserExtension = UserExtensionObject()
+ UserExtension.SetUserID(self.UserId)
+ UserExtension.SetIdentifier(self.Identifier)
+ return UserExtension
+
+ def FromXml(self, Item, Key):
+ self.UserId = XmlAttribute(XmlNode(Item, '%s' % Key), 'UserId')
+ self.Identifier = XmlAttribute(XmlNode(Item, '%s' % Key), 'Identifier')
+ if self.UserId == DataType.TAB_BINARY_HEADER_USERID \
+ and self.Identifier == DataType.TAB_BINARY_HEADER_IDENTIFIER:
+ for SubItem in XmlList(Item, '%s/BinaryAbstract' % Key):
+ BinaryAbstractLang = XmlAttribute(SubItem, 'Lang')
+ self.BinaryAbstractList.append((BinaryAbstractLang, XmlElement(SubItem, '%s/BinaryAbstract' % Key)))
+ for SubItem in XmlList(Item, '%s/BinaryDescription' % Key):
+ BinaryDescriptionLang = XmlAttribute(SubItem, 'Lang')
+ self.BinaryDescriptionList.append((BinaryDescriptionLang,
+ XmlElement(SubItem, '%s/BinaryDescription' % Key)))
+ for SubItem in XmlList(Item, '%s/BinaryCopyright' % Key):
+ BinaryCopyrightLang = XmlAttribute(SubItem, 'Lang')
+ self.BinaryCopyrightList.append((BinaryCopyrightLang,
+ XmlElement(SubItem, '%s/BinaryCopyright' % Key)))
+ for SubItem in XmlList(Item, '%s/BinaryLicense' % Key):
+ BinaryLicenseLang = XmlAttribute(SubItem, 'Lang')
+ self.BinaryLicenseList.append((BinaryLicenseLang,
+ XmlElement(SubItem, '%s/BinaryLicense' % Key)))
+
+ DefineItem = XmlNode(Item, '%s/Define' % Key)
+ for SubItem in XmlList(DefineItem, 'Define/Statement'):
+ Statement = XmlElement(SubItem, '%s/Statement' % Key)
+ self.DefineDict[Statement] = ""
+ BuildOptionItem = XmlNode(Item, '%s/BuildOption' % Key)
+ for SubItem in XmlList(BuildOptionItem, 'BuildOption/Statement'):
+ Statement = XmlElement(SubItem, '%s/Statement' % Key)
+ Arch = XmlAttribute(XmlNode(SubItem, '%s/Statement' % Key), 'SupArchList')
+ self.BuildOptionDict[Arch] = Statement
+ IncludesItem = XmlNode(Item, '%s/Includes' % Key)
+ for SubItem in XmlList(IncludesItem, 'Includes/Statement'):
+ Statement = XmlElement(SubItem, '%s/Statement' % Key)
+ Arch = XmlAttribute(XmlNode(SubItem, '%s/Statement' % Key), 'SupArchList')
+ self.IncludesDict[Statement] = Arch
+ SourcesItem = XmlNode(Item, '%s/Sources' % Key)
+ Tmp = UserExtensionSourceXml()
+ SourceDict = Tmp.FromXml(SourcesItem, 'Sources')
+ self.SourcesDict = SourceDict
+ BinariesItem = XmlNode(Item, '%s/Binaries' % Key)
+ Tmp = UserExtensionBinaryXml()
+ BinariesDict = Tmp.FromXml(BinariesItem, 'Binaries')
+ self.BinariesDict = BinariesDict
+ self.Statement = XmlElement(Item, 'UserExtensions')
+ SupArch = XmlAttribute(XmlNode(Item, '%s' % Key), 'SupArchList')
+ self.SupArchList = [Arch for Arch in GetSplitValueList(SupArch, DataType.TAB_SPACE_SPLIT) if Arch]
+ UserExtension = UserExtensionObject()
+ UserExtension.SetUserID(self.UserId)
+ UserExtension.SetIdentifier(self.Identifier)
+ UserExtension.SetBinaryAbstract(self.BinaryAbstractList)
+ UserExtension.SetBinaryDescription(self.BinaryDescriptionList)
+ UserExtension.SetBinaryCopyright(self.BinaryCopyrightList)
+ UserExtension.SetBinaryLicense(self.BinaryLicenseList)
+ UserExtension.SetStatement(self.Statement)
+ UserExtension.SetSupArchList(self.SupArchList)
+ UserExtension.SetDefinesDict(self.DefineDict)
+ UserExtension.SetBuildOptionDict(self.BuildOptionDict)
+ UserExtension.SetIncludesDict(self.IncludesDict)
+ UserExtension.SetSourcesDict(self.SourcesDict)
+ UserExtension.SetBinariesDict(self.BinariesDict)
+ return UserExtension
+
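+    # For illustration, a hypothetical extension exercising the binary
+    # header branch above, assuming TAB_BINARY_HEADER_USERID is 'TianoCore'
+    # and TAB_BINARY_HEADER_IDENTIFIER is 'BinaryHeader':
+    #
+    #   <UserExtensions UserId="TianoCore" Identifier="BinaryHeader">
+    #     <BinaryAbstract Lang="en-us">...</BinaryAbstract>
+    #   </UserExtensions>
+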
+ def ToXml(self, UserExtension, Key):
+ if self.UserId:
+ pass
+ AttributeList = [['UserId', str(UserExtension.GetUserID())],
+ ['Identifier', str(UserExtension.GetIdentifier())],
+ ['SupArchList', \
+ GetStringOfList(UserExtension.GetSupArchList())],
+ ]
+ Root = CreateXmlElement('%s' % Key, UserExtension.GetStatement(), [], \
+ AttributeList)
+ if UserExtension.GetIdentifier() == DataType.TAB_BINARY_HEADER_IDENTIFIER and \
+ UserExtension.GetUserID() == DataType.TAB_BINARY_HEADER_USERID:
+ for (Lang, Value) in UserExtension.GetBinaryAbstract():
+ if Value:
+ ChildElement = CreateXmlElement('BinaryAbstract', Value, [], [['Lang', Lang]])
+ Root.appendChild(ChildElement)
+ for (Lang, Value) in UserExtension.GetBinaryDescription():
+ if Value:
+ ChildElement = CreateXmlElement('BinaryDescription', Value, [], [['Lang', Lang]])
+ Root.appendChild(ChildElement)
+ for (Lang, Value) in UserExtension.GetBinaryCopyright():
+ if Value:
+ ChildElement = CreateXmlElement('BinaryCopyright', Value, [], [])
+ Root.appendChild(ChildElement)
+ for (Lang, Value) in UserExtension.GetBinaryLicense():
+ if Value:
+ ChildElement = CreateXmlElement('BinaryLicense', Value, [], [])
+ Root.appendChild(ChildElement)
+
+ NodeList = []
+ DefineDict = UserExtension.GetDefinesDict()
+ if DefineDict:
+ for Item in DefineDict.keys():
+ NodeList.append(CreateXmlElement\
+ ('Statement', Item, [], []))
+ DefineElement = CreateXmlElement('Define', '', NodeList, [])
+ Root.appendChild(DefineElement)
+ NodeList = []
+ BuildOptionDict = UserExtension.GetBuildOptionDict()
+ if BuildOptionDict:
+ for Item in BuildOptionDict.keys():
+ NodeList.append(CreateXmlElement\
+ ('Statement', BuildOptionDict[Item], [], \
+ [['SupArchList', Item]]))
+ BuildOptionElement = \
+ CreateXmlElement('BuildOption', '', NodeList, [])
+ Root.appendChild(BuildOptionElement)
+ NodeList = []
+ IncludesDict = UserExtension.GetIncludesDict()
+ if IncludesDict:
+ for Item in IncludesDict.keys():
+ NodeList.append(CreateXmlElement\
+ ('Statement', Item, [], \
+ [['SupArchList', IncludesDict[Item]]]))
+ IncludesElement = CreateXmlElement('Includes', '', NodeList, [])
+ Root.appendChild(IncludesElement)
+ NodeList = []
+ SourcesDict = UserExtension.GetSourcesDict()
+ if SourcesDict:
+ Tmp = UserExtensionSourceXml()
+ Root.appendChild(Tmp.ToXml(SourcesDict, 'Sources'))
+ NodeList = []
+ BinariesDict = UserExtension.GetBinariesDict()
+ if BinariesDict:
+ Tmp = UserExtensionBinaryXml()
+ Root.appendChild(Tmp.ToXml(BinariesDict, 'Binaries'))
+ return Root
+
+ def __str__(self):
+ Str = "UserId = %s Identifier = %s" % (self.UserId, self.Identifier)
+ Str = Str + '\n\tDefines:' + str(self.Defines)
+ Str = Str + '\n\tBuildOptions:' + str(self.BuildOptions)
+ return Str
+
+##
+# UserExtensionSourceXml
+#
+class UserExtensionSourceXml(object):
+ def __init__(self):
+ self.UserExtensionSource = ''
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ if self.UserExtensionSource:
+ pass
+ Dict = {}
+ #SourcesItem = XmlNode(Item, '%s/Sources' % Key)
+ for SubItem in XmlList(Item, 'Sources/SourceFile'):
+ FileName = XmlElement(SubItem, 'SourceFile/FileName')
+ Family = XmlElement(SubItem, 'SourceFile/Family')
+ FeatureFlag = XmlElement(SubItem, 'SourceFile/FeatureFlag')
+ SupArchStr = XmlElement(SubItem, 'SourceFile/SupArchList')
+ DictKey = (FileName, Family, FeatureFlag, SupArchStr)
+ ValueList = []
+ for ValueNodeItem in XmlList(SubItem, \
+ 'SourceFile/SourceFileOtherAttr'):
+ TagName = XmlElement(ValueNodeItem, \
+ 'SourceFileOtherAttr/TagName')
+ ToolCode = XmlElement(ValueNodeItem, \
+ 'SourceFileOtherAttr/ToolCode')
+ Comment = XmlElement(ValueNodeItem, \
+ 'SourceFileOtherAttr/Comment')
+ if (TagName == ' ') and (ToolCode == ' ') and (Comment == ' '):
+ TagName = ''
+ ToolCode = ''
+ Comment = ''
+ ValueList.append((TagName, ToolCode, Comment))
+ Dict[DictKey] = ValueList
+ return Dict
+
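+    # For illustration: the dictionary built above maps
+    #   (FileName, Family, FeatureFlag, SupArchList) ->
+    #     [(TagName, ToolCode, Comment), ...]
+    # and ToXml below serializes the same structure back out.
+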
+ def ToXml(self, Dict, Key):
+ if self.UserExtensionSource:
+ pass
+ SourcesNodeList = []
+ for Item in Dict:
+ ValueList = Dict[Item]
+ (FileName, Family, FeatureFlag, SupArchStr) = Item
+ SourceFileNodeList = []
+ SourceFileNodeList.append(["FileName", FileName])
+ SourceFileNodeList.append(["Family", Family])
+ SourceFileNodeList.append(["FeatureFlag", FeatureFlag])
+ SourceFileNodeList.append(["SupArchList", SupArchStr])
+ for (TagName, ToolCode, Comment) in ValueList:
+ ValueNodeList = []
+ if not (TagName or ToolCode or Comment):
+ TagName = ' '
+ ToolCode = ' '
+ Comment = ' '
+ ValueNodeList.append(["TagName", TagName])
+ ValueNodeList.append(["ToolCode", ToolCode])
+ ValueNodeList.append(["Comment", Comment])
+ ValueNodeXml = CreateXmlElement('SourceFileOtherAttr', '', \
+ ValueNodeList, [])
+ SourceFileNodeList.append(ValueNodeXml)
+ SourceFileNodeXml = CreateXmlElement('SourceFile', '', \
+ SourceFileNodeList, [])
+ SourcesNodeList.append(SourceFileNodeXml)
+ Root = CreateXmlElement('%s' % Key, '', SourcesNodeList, [])
+ return Root
+
+##
+# UserExtensionBinaryXml
+#
+class UserExtensionBinaryXml(object):
+ def __init__(self):
+ self.UserExtensionBinary = ''
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ if self.UserExtensionBinary:
+ pass
+ Dict = {}
+ for SubItem in XmlList(Item, 'Binaries/Binary'):
+ FileName = XmlElement(SubItem, 'Binary/FileName')
+ FileType = XmlElement(SubItem, 'Binary/FileType')
+ FFE = XmlElement(SubItem, 'Binary/FeatureFlag')
+ SupArch = XmlElement(SubItem, 'Binary/SupArchList')
+ DictKey = (FileName, FileType, ConvertNOTEQToNE(FFE), SupArch)
+ ValueList = []
+ for ValueNodeItem in XmlList(SubItem, \
+ 'Binary/BinaryFileOtherAttr'):
+ Target = XmlElement(ValueNodeItem, \
+ 'BinaryFileOtherAttr/Target')
+ Family = XmlElement(ValueNodeItem, \
+ 'BinaryFileOtherAttr/Family')
+ TagName = XmlElement(ValueNodeItem, \
+ 'BinaryFileOtherAttr/TagName')
+ Comment = XmlElement(ValueNodeItem, \
+ 'BinaryFileOtherAttr/Comment')
+ if (Target == ' ') and (Family == ' ') and \
+ (TagName == ' ') and (Comment == ' '):
+ Target = ''
+ Family = ''
+ TagName = ''
+ Comment = ''
+
+ ValueList.append((Target, Family, TagName, Comment))
+ Dict[DictKey] = ValueList
+ return Dict
+
+ def ToXml(self, Dict, Key):
+ if self.UserExtensionBinary:
+ pass
+ BinariesNodeList = []
+ for Item in Dict:
+ ValueList = Dict[Item]
+ (FileName, FileType, FeatureFlag, SupArch) = Item
+ FileNodeList = []
+ FileNodeList.append(["FileName", FileName])
+ FileNodeList.append(["FileType", FileType])
+ FileNodeList.append(["FeatureFlag", ConvertNEToNOTEQ(FeatureFlag)])
+ FileNodeList.append(["SupArchList", SupArch])
+ for (Target, Family, TagName, Comment) in ValueList:
+ ValueNodeList = []
+ if not (Target or Family or TagName or Comment):
+ Target = ' '
+ Family = ' '
+ TagName = ' '
+ Comment = ' '
+ ValueNodeList.append(["Target", Target])
+ ValueNodeList.append(["Family", Family])
+ ValueNodeList.append(["TagName", TagName])
+ ValueNodeList.append(["Comment", Comment])
+ ValueNodeXml = CreateXmlElement('BinaryFileOtherAttr', '', \
+ ValueNodeList, [])
+ FileNodeList.append(ValueNodeXml)
+ FileNodeXml = CreateXmlElement('Binary', '', FileNodeList, [])
+ BinariesNodeList.append(FileNodeXml)
+ Root = CreateXmlElement('%s' % Key, '', BinariesNodeList, [])
+ return Root
+
+##
+# LibraryClassXml
+#
+class LibraryClassXml(object):
+ def __init__(self):
+ self.Keyword = ''
+ self.HeaderFile = ''
+ self.RecommendedInstanceGuid = ''
+ self.RecommendedInstanceVersion = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.Keyword = XmlAttribute(XmlNode(Item, '%s' % Key), 'Keyword')
+ if self.Keyword == '':
+ self.Keyword = XmlElement(Item, '%s/Keyword' % Key)
+ self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
+ self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+ LibraryClass = LibraryClassObject()
+ LibraryClass.SetLibraryClass(self.Keyword)
+ LibraryClass.SetIncludeHeader(self.HeaderFile)
+ if self.CommonDefines.Usage:
+ LibraryClass.SetUsage(self.CommonDefines.Usage)
+ LibraryClass.SetSupArchList(self.CommonDefines.SupArchList)
+ LibraryClass.SetSupModuleList(self.CommonDefines.SupModList)
+ LibraryClass.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ LibraryClass.SetHelpTextList(GetHelpTextList(self.HelpText))
+ return LibraryClass
+
+ def ToXml(self, LibraryClass, Key):
+ if self.HeaderFile:
+ pass
+ AttributeList = \
+ [['Keyword', LibraryClass.GetLibraryClass()],
+ ['SupArchList', GetStringOfList(LibraryClass.GetSupArchList())],
+ ['SupModList', GetStringOfList(LibraryClass.GetSupModuleList())]
+ ]
+ NodeList = [['HeaderFile', LibraryClass.GetIncludeHeader()]]
+ for Item in LibraryClass.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def ToXml2(self, LibraryClass, Key):
+ if self.HeaderFile:
+ pass
+ FeatureFlag = ConvertNEToNOTEQ(LibraryClass.GetFeatureFlag())
+ AttributeList = \
+ [['Usage', LibraryClass.GetUsage()], \
+ ['SupArchList', GetStringOfList(LibraryClass.GetSupArchList())], \
+ ['SupModList', GetStringOfList(LibraryClass.GetSupModuleList())], \
+ ['FeatureFlag', FeatureFlag]
+ ]
+ NodeList = [['Keyword', LibraryClass.GetLibraryClass()], ]
+ for Item in LibraryClass.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def __str__(self):
+ Str = "Keyword = %s HeaderFile = %s RecommendedInstanceGuid = %s RecommendedInstanceVersion = %s %s" % \
+ (self.Keyword, self.HeaderFile, self.RecommendedInstanceGuid, self.RecommendedInstanceVersion, \
+ self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + "\n\t" + str(Item)
+ return Str
+
+##
+# FilenameXml
+#
+class FilenameXml(object):
+ def __init__(self):
+ self.FileType = ''
+ self.Filename = ''
+ self.CommonDefines = CommonDefinesXml()
+
+ def FromXml(self, Item, Key):
+ self.FileType = XmlAttribute(Item, 'FileType')
+ Guid = XmlAttribute(Item, 'GUID')
+ self.Filename = XmlElement(Item, 'Filename')
+ self.CommonDefines.FromXml(Item, Key)
+ FeatureFlag = ConvertNOTEQToNE(self.CommonDefines.FeatureFlag)
+ Filename = FileNameObject()
+ #
+ # Convert File Type
+ #
+ if self.FileType == 'UEFI_IMAGE':
+ self.FileType = 'PE32'
+
+ Filename.SetGuidValue(Guid)
+ Filename.SetFileType(self.FileType)
+ Filename.SetFilename(self.Filename)
+ Filename.SetSupArchList(self.CommonDefines.SupArchList)
+ Filename.SetFeatureFlag(FeatureFlag)
+
+ return Filename
+
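+    # For illustration, a hypothetical element accepted by FromXml above;
+    # note FileType 'UEFI_IMAGE' is normalized to 'PE32':
+    #
+    #   <Filename FileType="UEFI_IMAGE" SupArchList="IA32">Driver.efi</Filename>
+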
+ def ToXml(self, Filename, Key):
+ if self.Filename:
+ pass
+ AttributeList = [['SupArchList', \
+ GetStringOfList(Filename.GetSupArchList())],
+ ['FileType', Filename.GetFileType()],
+ ['FeatureFlag', ConvertNEToNOTEQ(Filename.GetFeatureFlag())],
+ ['GUID', Filename.GetGuidValue()]
+ ]
+ Root = CreateXmlElement('%s' % Key, Filename.GetFilename(), [], AttributeList)
+
+ return Root
+
+ def __str__(self):
+ return "FileType = %s Filename = %s %s" \
+ % (self.FileType, self.Filename, self.CommonDefines)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py
new file mode 100755
index 00000000..96c51c33
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py
@@ -0,0 +1,278 @@
+## @file
+# This file is used to parse the GUID/Protocol/PPI XML parts of a .PKG file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+GuidProtocolPpiXml
+'''
+from Library.StringUtils import ConvertNEToNOTEQ
+from Library.StringUtils import ConvertNOTEQToNE
+from Library.StringUtils import GetStringOfList
+from Library.Xml.XmlRoutines import XmlElement
+from Library.Xml.XmlRoutines import XmlAttribute
+from Library.Xml.XmlRoutines import XmlNode
+from Library.Xml.XmlRoutines import XmlList
+from Library.Xml.XmlRoutines import CreateXmlElement
+
+from Object.POM.CommonObject import GuidObject
+from Object.POM.CommonObject import ProtocolObject
+from Object.POM.CommonObject import PpiObject
+
+from Xml.CommonXml import CommonDefinesXml
+from Xml.CommonXml import HelpTextXml
+
+from Xml.XmlParserMisc import GetHelpTextList
+
+##
+#GUID/Protocol/Ppi Common
+#
+class GuidProtocolPpiXml(object):
+ def __init__(self, Mode):
+ self.UiName = ''
+ self.GuidTypes = ''
+ self.Notify = ''
+ self.CName = ''
+ self.GuidValue = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.HelpText = []
+ #
+ # Guid/Ppi/Library, internal used for indicate return object for
+ # FromXml
+ #
+ self.Type = ''
+ #
+ # there are slightly different field between package and module
+ #
+ self.Mode = Mode
+ self.GuidType = ''
+ self.VariableName = ''
+
+ def FromXml(self, Item, Key):
+ self.UiName = XmlAttribute(XmlNode(Item, '%s' % Key), 'UiName')
+ self.GuidType = XmlAttribute(XmlNode(Item, '%s' % Key), 'GuidType')
+ self.Notify = XmlAttribute(XmlNode(Item, '%s' % Key), 'Notify')
+ self.CName = XmlElement(Item, '%s/CName' % Key)
+ self.GuidValue = XmlElement(Item, '%s/GuidValue' % Key)
+ self.VariableName = XmlElement(Item, '%s/VariableName' % Key)
+ self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ if self.Type == 'Guid':
+ GuidProtocolPpi = GuidObject()
+ elif self.Type == 'Protocol':
+ GuidProtocolPpi = ProtocolObject()
+ else:
+ GuidProtocolPpi = PpiObject()
+ GuidProtocolPpi.SetHelpTextList(GetHelpTextList(self.HelpText))
+
+ return GuidProtocolPpi
+
+ def ToXml(self, GuidProtocolPpi, Key):
+ if self.GuidValue:
+ pass
+ AttributeList = \
+ [['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
+ ['UiName', GuidProtocolPpi.GetName()], \
+ ['GuidType', GetStringOfList(GuidProtocolPpi.GetGuidTypeList())], \
+ ['Notify', str(GuidProtocolPpi.GetNotify()).lower()], \
+ ['SupArchList', GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
+ ['SupModList', GetStringOfList(GuidProtocolPpi.GetSupModuleList())], \
+ ['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ['GuidValue', GuidProtocolPpi.GetGuid()],
+ ['VariableName', GuidProtocolPpi.VariableName]
+ ]
+ for Item in GuidProtocolPpi.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = \
+ "UiName = %s Notify = %s GuidTypes = %s CName = %s GuidValue = %s %s" \
+ % (self.UiName, self.Notify, self.GuidTypes, self.CName, \
+ self.GuidValue, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + "\n\t" + str(Item)
+ return Str
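+
+# For illustration, a hypothetical module-scope entry matched by FromXml
+# above; UiName/GuidType/Notify come from attributes, while CName, GuidValue
+# and VariableName are child elements:
+#
+#   <Protocol Usage="CONSUMES" Notify="false">
+#     <CName>gEfiIpSecProtocolGuid</CName>
+#   </Protocol>
+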
+##
+#GUID Xml
+#
+class GuidXml(GuidProtocolPpiXml):
+ def __init__(self, Mode):
+ GuidProtocolPpiXml.__init__(self, Mode)
+ self.Type = 'Guid'
+
+ def FromXml(self, Item, Key):
+ GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
+
+ if self.Mode == 'Package':
+
+ GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
+ GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
+ GuidProtocolPpi.SetCName(self.CName)
+ GuidProtocolPpi.SetGuid(self.GuidValue)
+ else:
+ GuidProtocolPpi.SetUsage(self.CommonDefines.Usage)
+ if self.GuidType:
+ GuidProtocolPpi.SetGuidTypeList([self.GuidType])
+ GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
+ GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ GuidProtocolPpi.SetCName(self.CName)
+ GuidProtocolPpi.SetVariableName(self.VariableName)
+ return GuidProtocolPpi
+
+ def ToXml(self, GuidProtocolPpi, Key):
+ if self.Mode == 'Package':
+ AttributeList = \
+ [['GuidType', \
+ GetStringOfList(GuidProtocolPpi.GetGuidTypeList())], \
+ ['SupArchList', \
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
+ ['SupModList', \
+ GetStringOfList(GuidProtocolPpi.GetSupModuleList())],
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ['GuidValue', GuidProtocolPpi.GetGuid()],
+ ]
+ else:
+ AttributeList = \
+ [['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
+ ['GuidType', GetStringOfList(GuidProtocolPpi.GetGuidTypeList())],\
+ ['SupArchList', \
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
+ ['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ['VariableName', GuidProtocolPpi.GetVariableName()]
+ ]
+
+ for Item in GuidProtocolPpi.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
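+#
+# Round-trip sketch (illustrative; 'Entry' is a hypothetical <Entry> node
+# from a package surface area):
+#   Xml = GuidXml('Package')
+#   GuidObj = Xml.FromXml(Entry, 'Entry')   # GuidObject with CName/GUID set
+#   Node = Xml.ToXml(GuidObj, 'Entry')      # back to an XML element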
+##
+# Protocol Xml
+#
+class ProtocolXml(GuidProtocolPpiXml):
+ def __init__(self, Mode):
+ GuidProtocolPpiXml.__init__(self, Mode)
+ self.Type = 'Protocol'
+
+ def FromXml(self, Item, Key):
+ GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
+ if self.Mode == 'Package':
+ GuidProtocolPpi.SetFeatureFlag(self.CommonDefines.FeatureFlag)
+ GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
+ GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
+ GuidProtocolPpi.SetCName(self.CName)
+ GuidProtocolPpi.SetGuid(self.GuidValue)
+ else:
+ GuidProtocolPpi.SetUsage(self.CommonDefines.Usage)
+ if self.Notify.upper() == "TRUE":
+ GuidProtocolPpi.SetNotify(True)
+ elif self.Notify.upper() == "FALSE":
+ GuidProtocolPpi.SetNotify(False)
+ else:
+ GuidProtocolPpi.SetNotify('')
+ GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
+ GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ GuidProtocolPpi.SetCName(self.CName)
+
+ return GuidProtocolPpi
+
+ def ToXml(self, GuidProtocolPpi, Key):
+ if self.Mode == 'Package':
+ AttributeList = \
+ [['SupArchList', \
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
+ ['SupModList', \
+ GetStringOfList(GuidProtocolPpi.GetSupModuleList())], \
+ ['FeatureFlag', GuidProtocolPpi.GetFeatureFlag()]
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ['GuidValue', GuidProtocolPpi.GetGuid()],
+ ]
+ else:
+ AttributeList = \
+ [['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
+ ['Notify', str(GuidProtocolPpi.GetNotify()).lower()], \
+ ['SupArchList', \
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
+ ['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ]
+
+ for Item in GuidProtocolPpi.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+##
+# Ppi Xml
+#
+class PpiXml(GuidProtocolPpiXml):
+ def __init__(self, Mode):
+ GuidProtocolPpiXml.__init__(self, Mode)
+ self.Type = 'Ppi'
+
+ def FromXml(self, Item, Key):
+ GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
+ if self.Mode == 'Package':
+ GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
+ GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
+ GuidProtocolPpi.SetCName(self.CName)
+ GuidProtocolPpi.SetGuid(self.GuidValue)
+ else:
+ GuidProtocolPpi.SetUsage(self.CommonDefines.Usage)
+ if self.Notify.upper() == "TRUE":
+ GuidProtocolPpi.SetNotify(True)
+ elif self.Notify.upper() == "FALSE":
+ GuidProtocolPpi.SetNotify(False)
+ else:
+ GuidProtocolPpi.SetNotify('')
+ GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
+ GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ GuidProtocolPpi.SetCName(self.CName)
+
+ return GuidProtocolPpi
+
+ def ToXml(self, GuidProtocolPpi, Key):
+ if self.Mode == 'Package':
+ AttributeList = \
+ [['SupArchList', \
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())],
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ['GuidValue', GuidProtocolPpi.GetGuid()],
+ ]
+ else:
+ AttributeList = \
+ [['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
+ ['Notify', str(GuidProtocolPpi.GetNotify()).lower()], \
+ ['SupArchList', \
+ GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
+ ['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
+ ]
+ NodeList = [['CName', GuidProtocolPpi.GetCName()],
+ ]
+
+ for Item in GuidProtocolPpi.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/IniToXml.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/IniToXml.py
new file mode 100755
index 00000000..a0c939de
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/IniToXml.py
@@ -0,0 +1,496 @@
+## @file
+# This file is used to convert a package information data file to an XML file.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+IniToXml
+'''
+
+import os.path
+import re
+from time import strftime
+from time import localtime
+
+import Logger.Log as Logger
+from Logger.ToolError import UPT_INI_PARSE_ERROR
+from Logger.ToolError import FILE_NOT_FOUND
+from Library.Xml.XmlRoutines import CreateXmlElement
+from Library.DataType import TAB_VALUE_SPLIT
+from Library.DataType import TAB_EQUAL_SPLIT
+from Library.DataType import TAB_SECTION_START
+from Library.DataType import TAB_SECTION_END
+from Logger import StringTable as ST
+from Library.StringUtils import ConvertSpecialChar
+from Library.ParserValidate import IsValidPath
+from Library import GlobalData
+
+## IniParseError
+#
+# Log an INI parse error through Logger and abort.
+#
+# @param Error: Error message
+# @param File: File being parsed
+# @param Line: Line number on which the error occurred
+#
+def IniParseError(Error, File, Line):
+ Logger.Error("UPT", UPT_INI_PARSE_ERROR, File=File,
+ Line=Line, ExtraData=Error)
+
+## __ValidatePath
+#
+# @param Path: Path to be checked
+# @param Root: Root directory the path must reside under
+#
+def __ValidatePath(Path, Root):
+ Path = Path.strip()
+ if os.path.isabs(Path) or not IsValidPath(Path, Root):
+ return False, ST.ERR_FILELIST_LOCATION % (Root, Path)
+ return True, ''
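+
+# Behavior sketch (illustrative): absolute paths are always rejected;
+# relative paths must additionally satisfy IsValidPath against Root, e.g.
+#   __ValidatePath('Tools/MyTool', Workspace) -> (True, '') when valid
+#   __ValidatePath('/abs/path', Workspace)    -> (False, <error string>)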
+
+## ValidateMiscFile
+#
+# @param Filename: File to be checked
+#
+def ValidateMiscFile(Filename):
+ Root = GlobalData.gWORKSPACE
+ return __ValidatePath(Filename, Root)
+
+## ValidateToolsFile
+#
+# @param Filename: File to be checked
+#
+def ValidateToolsFile(Filename):
+ Valid, Cause = False, ''
+ if not Valid and 'EDK_TOOLS_PATH' in os.environ:
+ Valid, Cause = __ValidatePath(Filename, os.environ['EDK_TOOLS_PATH'])
+ if not Valid:
+ Valid, Cause = __ValidatePath(Filename, GlobalData.gWORKSPACE)
+ return Valid, Cause
+
+## ParseFileList
+#
+# @param Line: The remaining value text of a FileList entry
+# @param Map: Section map to receive the parsed entry
+# @param CurrentKey: Current keyword ('FileList')
+# @param PathFunc: Path validation function for the section
+#
+def ParseFileList(Line, Map, CurrentKey, PathFunc):
+ FileList = ["", {}]
+ TokenList = Line.split(TAB_VALUE_SPLIT)
+ if len(TokenList) > 0:
+ Path = TokenList[0].strip().replace('\\', '/')
+ if not Path:
+ return False, ST.ERR_WRONG_FILELIST_FORMAT
+ Valid, Cause = PathFunc(Path)
+ if not Valid:
+ return Valid, Cause
+ FileList[0] = TokenList[0].strip()
+ for Token in TokenList[1:]:
+ Attr = Token.split(TAB_EQUAL_SPLIT)
+ if len(Attr) != 2 or not Attr[0].strip() or not Attr[1].strip():
+ return False, ST.ERR_WRONG_FILELIST_FORMAT
+
+ Key = Attr[0].strip()
+ Val = Attr[1].strip()
+ if Key not in ['OS', 'Executable']:
+ return False, ST.ERR_UNKNOWN_FILELIST_ATTR % Key
+
+ if Key == 'OS' and Val not in ["Win32", "Win64", "Linux32",
+ "Linux64", "OS/X32", "OS/X64",
+ "GenericWin", "GenericNix"]:
+ return False, ST.ERR_FILELIST_ATTR % 'OS'
+ elif Key == 'Executable' and Val not in ['true', 'false']:
+ return False, ST.ERR_FILELIST_ATTR % 'Executable'
+ FileList[1][Key] = Val
+
+ Map[CurrentKey].append(FileList)
+ return True, ''
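+
+# Accepted value format sketch (illustrative path and attribute values):
+#   FileList = Tools/MyTool.exe | OS = Win32 | Executable = true
+# Entries after the first '|' must be OS or Executable attributes with the
+# value sets checked above.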
+
+## Create the DistributionHeader XML element and append it to Root
+#
+# @param DistMap: DistributionHeader content
+# @param Root: Root element to append to
+#
+def CreateHeaderXml(DistMap, Root):
+ Element1 = CreateXmlElement('Name', DistMap['Name'],
+ [], [['BaseName', DistMap['BaseName']]])
+ Element2 = CreateXmlElement('GUID', DistMap['GUID'],
+ [], [['Version', DistMap['Version']]])
+ AttributeList = [['ReadOnly', DistMap['ReadOnly']],
+ ['RePackage', DistMap['RePackage']]]
+ NodeList = [Element1,
+ Element2,
+ ['Vendor', DistMap['Vendor']],
+ ['Date', DistMap['Date']],
+ ['Copyright', DistMap['Copyright']],
+ ['License', DistMap['License']],
+ ['Abstract', DistMap['Abstract']],
+ ['Description', DistMap['Description']],
+ ['Signature', DistMap['Signature']],
+ ['XmlSpecification', DistMap['XmlSpecification']],
+ ]
+ Root.appendChild(CreateXmlElement('DistributionHeader', '',
+ NodeList, AttributeList))
+
+## Create a Tools or MiscellaneousFiles XML element and append it to Root
+#
+# @param Map: Section content
+# @param Root: Root element to append to
+# @param Tag: Element tag ('Tools' or 'MiscellaneousFiles')
+#
+def CreateToolsXml(Map, Root, Tag):
+ #
+ # Skip this section if every element is empty: the for/else below enters
+ # the else branch only when the loop completes without a break
+ #
+ for Key in Map:
+ if len(Map[Key]) > 0:
+ break
+ else:
+ return
+
+ NodeList = [['Name', Map['Name']],
+ ['Copyright', Map['Copyright']],
+ ['License', Map['License']],
+ ['Abstract', Map['Abstract']],
+ ['Description', Map['Description']],
+ ]
+ HeaderNode = CreateXmlElement('Header', '', NodeList, [])
+ NodeList = [HeaderNode]
+
+ for File in Map['FileList']:
+ AttrList = []
+ for Key in File[1]:
+ AttrList.append([Key, File[1][Key]])
+ NodeList.append(CreateXmlElement('Filename', File[0], [], AttrList))
+ Root.appendChild(CreateXmlElement(Tag, '', NodeList, []))
+
+## ValidateValues
+#
+# @param Key: Key
+# @param Value: Value
+# @param SectionName: SectionName
+#
+def ValidateValues(Key, Value, SectionName):
+ if SectionName == 'DistributionHeader':
+ Valid, Cause = ValidateRegValues(Key, Value)
+ if not Valid:
+ return Valid, Cause
+ Valid = __ValidateDistHeader(Key, Value)
+ if not Valid:
+ return Valid, ST.ERR_VALUE_INVALID % (Key, SectionName)
+ else:
+ Valid = __ValidateOtherHeader(Key, Value)
+ if not Valid:
+ return Valid, ST.ERR_VALUE_INVALID % (Key, SectionName)
+ return True, ''
+
+## ValidateRegValues
+#
+# @param Key: Key
+# @param Value: Value
+#
+def ValidateRegValues(Key, Value):
+ ValidateMap = {
+ 'ReadOnly' :
+ ('true|false', ST.ERR_BOOLEAN_VALUE % (Key, Value)),
+ 'RePackage' :
+ ('true|false', ST.ERR_BOOLEAN_VALUE % (Key, Value)),
+ 'GUID' :
+ ('[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}'
+ '-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}',
+ ST.ERR_GUID_VALUE % Value),
+ 'Version' : (r'[0-9]+(\.[0-9]+)?', ST.ERR_VERSION_VALUE % \
+ (Key, Value)),
+ 'XmlSpecification' : (r'1\.1', ST.ERR_VERSION_XMLSPEC % Value)
+ }
+ if Key not in ValidateMap:
+ return True, ''
+ Elem = ValidateMap[Key]
+ Match = re.compile(Elem[0]).match(Value)
+ if Match and Match.start() == 0 and Match.end() == len(Value):
+ return True, ''
+ return False, Elem[1]
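+
+# The start/end check above emulates a full match; illustratively it is
+# equivalent to:
+#   re.fullmatch(Elem[0], Value) is not None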
+
+## __ValidateDistHeaderName
+#
+# @param Name: Name
+#
+def __ValidateDistHeaderName(Name):
+ if len(Name) < 1:
+ return False
+
+ for Char in Name:
+ if ord(Char) < 0x20 or ord(Char) >= 0x7f:
+ return False
+ return True
+
+## __ValidateDistHeaderBaseName
+#
+# @param BaseName: BaseName
+#
+def __ValidateDistHeaderBaseName(BaseName):
+ if not BaseName:
+ return False
+ if not BaseName[0].isalnum() and BaseName[0] != '_':
+ return False
+ for Char in BaseName[1:]:
+ if not Char.isalnum() and Char not in '-_':
+ return False
+ return True
+
+## __ValidateDistHeaderAbstract
+#
+# @param Abstract: Abstract
+#
+def __ValidateDistHeaderAbstract(Abstract):
+ return '\t' not in Abstract and len(Abstract.splitlines()) == 1
+
+## __ValidateOtherHeaderAbstract
+#
+# @param Abstract: Abstract
+#
+def __ValidateOtherHeaderAbstract(Abstract):
+ return __ValidateDistHeaderAbstract(Abstract)
+
+## __ValidateDistHeader
+#
+# @param Key: Key
+# @param Value: Value
+#
+def __ValidateDistHeader(Key, Value):
+ ValidateMap = {
+ 'Name' : __ValidateDistHeaderName,
+ 'BaseName' : __ValidateDistHeaderBaseName,
+ 'Abstract' : __ValidateDistHeaderAbstract,
+ 'Vendor' : __ValidateDistHeaderAbstract
+ }
+ return not (Value and Key in ValidateMap and not ValidateMap[Key](Value))
+
+## __ValidateOtherHeader
+#
+# @param Key: Key
+# @param Value: Value
+#
+def __ValidateOtherHeader(Key, Value):
+ ValidateMap = {
+ 'Name' : __ValidateDistHeaderName,
+ 'Abstract' : __ValidateOtherHeaderAbstract
+ }
+ return not (Value and Key in ValidateMap and not ValidateMap[Key](Value))
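+
+# Behavior sketch (illustrative): empty values and unknown keys pass; only a
+# known key whose non-empty value fails its validator is rejected, e.g.
+#   __ValidateDistHeader('Name', '')            -> True
+#   __ValidateDistHeader('Name', 'Bad\x01Name') -> False (control character)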
+
+## Convert an INI template file to an XML distribution package description
+#
+# @param IniFile: Path of the INI template
+# @return: File name of the generated XML file
+#
+def IniToXml(IniFile):
+ if not os.path.exists(IniFile):
+ Logger.Error("UPT", FILE_NOT_FOUND, ST.ERR_TEMPLATE_NOTFOUND % IniFile)
+
+ DistMap = {'ReadOnly' : '', 'RePackage' : '', 'Name' : '',
+ 'BaseName' : '', 'GUID' : '', 'Version' : '', 'Vendor' : '',
+ 'Date' : '', 'Copyright' : '', 'License' : '', 'Abstract' : '',
+ 'Description' : '', 'Signature' : '', 'XmlSpecification' : ''
+ }
+
+ ToolsMap = {'Name' : '', 'Copyright' : '', 'License' : '',
+ 'Abstract' : '', 'Description' : '', 'FileList' : []}
+ #
+ # Only FileList is a list: [['file1', {}], ['file2', {}], ...]
+ #
+ MiscMap = {'Name' : '', 'Copyright' : '', 'License' : '',
+ 'Abstract' : '', 'Description' : '', 'FileList' : []}
+
+ SectionMap = {
+ 'DistributionHeader' : DistMap,
+ 'ToolsHeader' : ToolsMap,
+ 'MiscellaneousFilesHeader' : MiscMap
+ }
+
+ PathValidator = {
+ 'ToolsHeader' : ValidateToolsFile,
+ 'MiscellaneousFilesHeader' : ValidateMiscFile
+ }
+
+ ParsedSection = []
+
+ SectionName = ''
+ CurrentKey = ''
+ PreMap = None
+ Map = None
+ with open(IniFile, 'r') as IniFd:
+ FileContent = ConvertSpecialChar(IniFd.readlines())
+ LastIndex = 0
+ for Index in range(0, len(FileContent)):
+ LastIndex = Index
+ Line = FileContent[Index].strip()
+ if Line == '' or Line.startswith(';'):
+ continue
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ CurrentKey = ''
+ SectionName = Line[1:-1].strip()
+ if SectionName not in SectionMap:
+ IniParseError(ST.ERR_SECTION_NAME_INVALID % SectionName,
+ IniFile, Index+1)
+
+ if SectionName in ParsedSection:
+ IniParseError(ST.ERR_SECTION_REDEFINE % SectionName,
+ IniFile, Index+1)
+ else:
+ ParsedSection.append(SectionName)
+
+ Map = SectionMap[SectionName]
+ continue
+ if not Map:
+ IniParseError(ST.ERR_SECTION_NAME_NONE, IniFile, Index+1)
+ TokenList = Line.split(TAB_EQUAL_SPLIT, 1)
+ TempKey = TokenList[0].strip()
+ #
+ # Value spans multiple lines, or the same keyword appears more
+ # than once
+ #
+ if len(TokenList) < 2 or TempKey not in Map:
+ if CurrentKey == '':
+ IniParseError(ST.ERR_KEYWORD_INVALID % TempKey,
+ IniFile, Index+1)
+ elif CurrentKey == 'FileList':
+ #
+ # Special for FileList
+ #
+ Valid, Cause = ParseFileList(Line, Map, CurrentKey,
+ PathValidator[SectionName])
+ if not Valid:
+ IniParseError(Cause, IniFile, Index+1)
+
+ else:
+ #
+ # Multiple lines for one key (e.g. License), or the string on the
+ # left side of '=' is not a recognized keyword
+ #
+ Map[CurrentKey] = ''.join([Map[CurrentKey], '\n', Line])
+ Valid, Cause = ValidateValues(CurrentKey,
+ Map[CurrentKey], SectionName)
+ if not Valid:
+ IniParseError(Cause, IniFile, Index+1)
+ continue
+
+ if (TokenList[1].strip() == ''):
+ IniParseError(ST.ERR_EMPTY_VALUE, IniFile, Index+1)
+
+ #
+ # A keyword found
+ #
+ CurrentKey = TempKey
+ if Map[CurrentKey]:
+ IniParseError(ST.ERR_KEYWORD_REDEFINE % CurrentKey,
+ IniFile, Index+1)
+
+ if id(Map) != id(PreMap) and Map['Copyright']:
+ PreMap = Map
+ Copyright = Map['Copyright'].lower()
+ Pos = Copyright.find('copyright')
+ if Pos == -1:
+ IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, Index)
+ if not Copyright[Pos + len('copyright'):].lstrip(' ').startswith('('):
+ IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, Index)
+
+ if CurrentKey == 'FileList':
+ Valid, Cause = ParseFileList(TokenList[1], Map, CurrentKey,
+ PathValidator[SectionName])
+ if not Valid:
+ IniParseError(Cause, IniFile, Index+1)
+ else:
+ Map[CurrentKey] = TokenList[1].strip()
+ Valid, Cause = ValidateValues(CurrentKey,
+ Map[CurrentKey], SectionName)
+ if not Valid:
+ IniParseError(Cause, IniFile, Index+1)
+
+ if id(Map) != id(PreMap) and Map['Copyright'] and 'copyright' not in Map['Copyright'].lower():
+ IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, LastIndex)
+
+ #
+ # Check mandatory keys
+ #
+ CheckMdtKeys(DistMap, IniFile, LastIndex,
+ (('ToolsHeader', ToolsMap), ('MiscellaneousFilesHeader', MiscMap))
+ )
+
+ return CreateXml(DistMap, ToolsMap, MiscMap, IniFile)
+
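+## Minimal INI template sketch accepted by IniToXml above (hypothetical
+# values; the keys are the mandatory set checked in CheckMdtKeys below):
+#   [DistributionHeader]
+#   Name = ExampleDist
+#   GUID = 12345678-1234-1234-1234-123456789abc
+#   Version = 1.0
+#   Vendor = Example Vendor
+#   Copyright = Copyright (c) 2018, Example Vendor. All rights reserved.
+#   License = BSD-2-Clause-Patent
+#   Abstract = An example distribution
+#   XmlSpecification = 1.1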
+
+## CheckMdtKeys
+#
+# @param MdtDistKeys: All mandatory keys
+# @param DistMap: Dist content
+# @param IniFile: Ini file
+# @param LastIndex: Last index of Ini file
+# @param Maps: Tools and Misc section name and map. (('section_name', map),*)
+#
+def CheckMdtKeys(DistMap, IniFile, LastIndex, Maps):
+ MdtDistKeys = ['Name', 'GUID', 'Version', 'Vendor', 'Copyright', 'License', 'Abstract', 'XmlSpecification']
+ for Key in MdtDistKeys:
+ if Key not in DistMap or DistMap[Key] == '':
+ IniParseError(ST.ERR_KEYWORD_MANDATORY % Key, IniFile, LastIndex+1)
+
+ if '.' not in DistMap['Version']:
+ DistMap['Version'] = DistMap['Version'] + '.0'
+
+ DistMap['Date'] = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))
+
+ #
+ # Check Tools Surface Area according to UPT Spec
+ # <Tools> {0,}
+ # <Header> ... </Header> {0,1}
+ # <Filename> ... </Filename> {1,}
+ # </Tools>
+ # <Header>
+ # <Name> xs:normalizedString </Name> {1}
+ # <Copyright> xs:string </Copyright> {0,1}
+ # <License> xs:string </License> {0,1}
+ # <Abstract> xs:normalizedString </Abstract> {0,1}
+ # <Description> xs:string </Description> {0,1}
+ # </Header>
+ #
+ for Item in Maps:
+ Map = Item[1]
+ NonEmptyKey = 0
+ for Key in Map:
+ if Map[Key]:
+ NonEmptyKey += 1
+
+ if NonEmptyKey > 0 and not Map['FileList']:
+ IniParseError(ST.ERR_KEYWORD_MANDATORY % (Item[0] + '.FileList'), IniFile, LastIndex+1)
+
+ if NonEmptyKey > 0 and not Map['Name']:
+ IniParseError(ST.ERR_KEYWORD_MANDATORY % (Item[0] + '.Name'), IniFile, LastIndex+1)
+
+## CreateXml
+#
+# @param DistMap: Dist Content
+# @param ToolsMap: Tools Content
+# @param MiscMap: Misc Content
+# @param IniFile: Ini File
+#
+def CreateXml(DistMap, ToolsMap, MiscMap, IniFile):
+ Attrs = [['xmlns', 'http://www.uefi.org/2011/1.1'],
+ ['xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'],
+ ]
+ Root = CreateXmlElement('DistributionPackage', '', [], Attrs)
+ CreateHeaderXml(DistMap, Root)
+ CreateToolsXml(ToolsMap, Root, 'Tools')
+ CreateToolsXml(MiscMap, Root, 'MiscellaneousFiles')
+
+ FileAndExt = IniFile.rsplit('.', 1)
+ if len(FileAndExt) > 1:
+ FileName = FileAndExt[0] + '.xml'
+ else:
+ FileName = IniFile + '.xml'
+ with open(FileName, 'w') as File:
+ File.write(Root.toprettyxml(indent = ' '))
+ return FileName
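+
+## Usage sketch (illustrative): IniToXml('ExampleDist.ini') validates the
+# template, writes 'ExampleDist.xml' alongside it, and returns that name.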
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
new file mode 100755
index 00000000..3419affa
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
@@ -0,0 +1,1003 @@
+## @file
+# This file is used to parse the module surface area of a .PKG file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+ModuleSurfaceAreaXml
+'''
+from xml.dom import minidom
+
+from Library.StringUtils import ConvertNEToNOTEQ
+from Library.StringUtils import ConvertNOTEQToNE
+from Library.StringUtils import GetStringOfList
+from Library.StringUtils import IsMatchArch
+from Library.Xml.XmlRoutines import XmlElement
+from Library.Xml.XmlRoutines import XmlAttribute
+from Library.Xml.XmlRoutines import XmlNode
+from Library.Xml.XmlRoutines import XmlList
+from Library.Xml.XmlRoutines import CreateXmlElement
+from Object.POM.CommonObject import GuidVersionObject
+from Object.POM.ModuleObject import BootModeObject
+from Object.POM.ModuleObject import DepexObject
+from Object.POM.ModuleObject import ModuleObject
+from Object.POM.ModuleObject import EventObject
+from Object.POM.ModuleObject import HobObject
+from Object.POM.ModuleObject import SourceFileObject
+from Object.POM.ModuleObject import PackageDependencyObject
+from Object.POM.ModuleObject import ExternObject
+from Object.POM.ModuleObject import BinaryFileObject
+from Object.POM.ModuleObject import AsBuiltObject
+from Object.POM.ModuleObject import BinaryBuildFlagObject
+from Xml.CommonXml import ClonedFromXml
+from Xml.CommonXml import HeaderXml
+from Xml.CommonXml import HelpTextXml
+from Xml.CommonXml import CommonDefinesXml
+from Xml.CommonXml import LibraryClassXml
+from Xml.CommonXml import UserExtensionsXml
+from Xml.CommonXml import MiscellaneousFileXml
+from Xml.CommonXml import FilenameXml
+from Xml.GuidProtocolPpiXml import GuidXml
+from Xml.GuidProtocolPpiXml import ProtocolXml
+from Xml.GuidProtocolPpiXml import PpiXml
+from Xml.PcdXml import PcdEntryXml
+from Xml.XmlParserMisc import GetHelpTextList
+from Library import GlobalData
+from Library.Misc import GetSplitValueList
+
+## BinaryFileXml
+#
+# Represents the following XML item:
+#
+# <BinaryFile>
+# <Filename
+# FileType=" FileType " {1}
+# SupArchList=" ArchListType " {0,1}
+# FeatureFlag=" FeatureFlagExpression " {0,1} >
+# xs:anyURI
+# </Filename> {1,}
+# <AsBuilt> ... </AsBuilt> {0,}
+# </BinaryFile> {1,}
+#
+class BinaryFileXml(object):
+ def __init__(self):
+ self.FileNames = []
+ self.AsBuiltList = []
+ self.PatchPcdValues = ''
+ self.PcdExValues = ''
+ self.LibraryInstances = ''
+ self.BuildFlags = ''
+
+ def FromXml(self, Item, Key):
+ if self.FileNames:
+ pass
+ BinaryFile = BinaryFileObject()
+ FilenameList = []
+ SupArchList = ['COMMON']
+ for SubItem in XmlList(Item, '%s/Filename' % Key):
+ Axml = FilenameXml()
+ Bxml = Axml.FromXml(SubItem, 'Filename')
+ FilenameList.append(Bxml)
+ BinaryFile.SetFileNameList(FilenameList)
+ for FileName in FilenameList:
+ if FileName.GetSupArchList():
+ SupArchList = FileName.GetSupArchList()
+ BinaryFile.SetSupArchList(SupArchList)
+ if GlobalData.gIS_BINARY_INF:
+ AsBuiltList = []
+ for AsBuiltItem in XmlList(Item, '%s/AsBuilt' % Key):
+ AsBuilt = AsBuiltObject()
+
+ PatchPcdValueList = []
+ for SubItem in XmlList(AsBuiltItem, 'AsBuilt/PatchPcdValue'):
+ Axml = PcdEntryXml()
+ Bxml = Axml.FromXml(SubItem, 'PatchPcdValue')
+ PatchPcdValueList.append(Bxml)
+ AsBuilt.SetPatchPcdList(PatchPcdValueList)
+ PcdExValueList = []
+ for SubItem in XmlList(AsBuiltItem, 'AsBuilt/PcdExValue'):
+ Axml = PcdEntryXml()
+ Bxml = Axml.FromXml(SubItem, 'PcdExValue')
+ PcdExValueList.append(Bxml)
+ AsBuilt.SetPcdExList(PcdExValueList)
+ LibraryList = []
+ for SubItem in XmlList(Item, '%s/AsBuilt/LibraryInstances/GUID' % Key):
+ GuidVerObj = GuidVersionObject()
+ GUID = XmlElement(SubItem, 'GUID')
+ Version = XmlAttribute(XmlNode(SubItem, 'GUID'), 'Version')
+ GuidVerObj.SetGuid(GUID)
+ GuidVerObj.SetVersion(Version)
+ LibraryList.append(GuidVerObj)
+ if XmlList(Item, '%s/AsBuilt/LibraryInstances' % Key) and not LibraryList:
+ LibraryList = [None]
+ AsBuilt.SetLibraryInstancesList(LibraryList)
+ BuildFlagList = []
+ for SubItem in XmlList(Item, '%s/AsBuilt/BuildFlags' % Key):
+ BuildFlag = BuildFlagXml()
+ BuildFlagList.append(BuildFlag.FromXml2(SubItem, 'BuildFlags'))
+ AsBuilt.SetBuildFlagsList(BuildFlagList)
+ AsBuiltList.append(AsBuilt)
+ BinaryFile.SetAsBuiltList(AsBuiltList)
+ return BinaryFile
+
+ def ToXml(self, BinaryFile, Key):
+ if self.FileNames:
+ pass
+ NodeList = []
+ FilenameList = BinaryFile.GetFileNameList()
+ SupportArch = None
+ for Filename in FilenameList:
+ Tmp = FilenameXml()
+ NodeList.append(Tmp.ToXml(Filename, 'Filename'))
+ SupportArch = Filename.SupArchList
+
+ AsBuildList = BinaryFile.GetAsBuiltList()
+ PatchPcdValueList = AsBuildList.GetPatchPcdList()
+ PcdExList = AsBuildList.GetPcdExList()
+ LibGuidVerList = AsBuildList.GetLibraryInstancesList()
+ BuildFlagList = AsBuildList.GetBuildFlagsList()
+
+ AsBuiltNodeList = []
+
+ for Pcd in PatchPcdValueList:
+ if IsMatchArch(Pcd.SupArchList, SupportArch):
+ Tmp = PcdEntryXml()
+ AsBuiltNodeList.append(Tmp.ToXml4(Pcd, 'PatchPcdValue'))
+
+ for Pcd in PcdExList:
+ if IsMatchArch(Pcd.SupArchList, SupportArch):
+ Tmp = PcdEntryXml()
+ AsBuiltNodeList.append(Tmp.ToXml4(Pcd, 'PcdExValue'))
+
+ GuiVerElemList = []
+ for LibGuidVer in LibGuidVerList:
+ if LibGuidVer.GetLibGuid() and IsMatchArch(LibGuidVer.GetSupArchList(), SupportArch):
+ GuiVerElem = \
+ CreateXmlElement('GUID', LibGuidVer.GetLibGuid(), [], [['Version', LibGuidVer.GetLibVersion()]])
+ GuiVerElemList.append(GuiVerElem)
+ if len(GuiVerElemList) > 0:
+ LibGuidVerElem = CreateXmlElement('LibraryInstances', '', GuiVerElemList, [])
+ AsBuiltNodeList.append(LibGuidVerElem)
+
+ for BuildFlag in BuildFlagList:
+ if IsMatchArch(BuildFlag.GetSupArchList(), SupportArch):
+ for Item in BuildFlag.GetAsBuildList():
+ Tmp = BuildFlagXml()
+ Elem = CreateXmlElement('BuildFlags', ''.join(Item), [], [])
+ AsBuiltNodeList.append(Elem)
+
+ if len(AsBuiltNodeList) > 0:
+ Element = CreateXmlElement('AsBuilt', '', AsBuiltNodeList, [])
+ NodeList.append(Element)
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, [])
+
+ return Root
+
+ def __str__(self):
+ Str = "BinaryFiles:"
+ for Item in self.FileNames:
+ Str = Str + '\n\t' + str(Item)
+ for Item in self.PatchPcdValues:
+ Str = Str + '\n\t' + str(Item)
+ for Item in self.PcdExValues:
+ Str = Str + '\n\t' + str(Item)
+ for Item in self.LibraryInstances:
+ Str = Str + '\n\t' + str(Item)
+ for Item in self.BuildFlags:
+ Str = Str + '\n\t' + str(Item)
+ return Str
+
+##
+# PackageXml
+#
+class PackageXml(object):
+ def __init__(self):
+ self.Description = ''
+ self.Guid = ''
+ self.Version = ''
+ self.CommonDefines = CommonDefinesXml()
+
+ def FromXml(self, Item, Key):
+ self.Description = XmlElement(Item, '%s/Description' % Key)
+ self.Guid = XmlElement(Item, '%s/GUID' % Key)
+ self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
+ self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
+
+ PackageDependency = PackageDependencyObject()
+ PackageDependency.SetPackage(self.Description)
+ PackageDependency.SetGuid(self.Guid)
+ PackageDependency.SetVersion(self.Version)
+ PackageDependency.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ PackageDependency.SetSupArchList(self.CommonDefines.SupArchList)
+
+ return PackageDependency
+
+ def ToXml(self, PackageDependency, Key):
+ if self.Guid:
+ pass
+ AttributeList = [['SupArchList', GetStringOfList(PackageDependency.GetSupArchList())],
+ ['FeatureFlag', ConvertNEToNOTEQ(PackageDependency.GetFeatureFlag())], ]
+ Element1 = CreateXmlElement('GUID', PackageDependency.GetGuid(), [],
+ [['Version', PackageDependency.GetVersion()]])
+ NodeList = [['Description', PackageDependency.GetPackage()], Element1, ]
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = "Description = %s Guid = %s Version = %s %s" \
+ % (self.Description, self.Guid, self.Version, self.CommonDefines)
+ return Str
+##
+# ExternXml
+#
+class ExternXml(object):
+ def __init__(self):
+ self.CommonDefines = CommonDefinesXml()
+ self.EntryPoint = ''
+ self.UnloadImage = ''
+ self.Constructor = ''
+ self.Destructor = ''
+ self.SupModList = ''
+ self.SupArchList = ''
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.CommonDefines.FromXml(Item, Key)
+ self.EntryPoint = XmlElement(Item, '%s/EntryPoint' % Key)
+ self.UnloadImage = XmlElement(Item, '%s/UnloadImage' % Key)
+ self.Constructor = XmlElement(Item, '%s/Constructor' % Key)
+ self.Destructor = XmlElement(Item, '%s/Destructor' % Key)
+
+ Extern = ExternObject()
+ Extern.SetEntryPoint(self.EntryPoint)
+ Extern.SetUnloadImage(self.UnloadImage)
+ Extern.SetConstructor(self.Constructor)
+ Extern.SetDestructor(self.Destructor)
+ if self.CommonDefines.SupModList:
+ Extern.SetSupModList(self.CommonDefines.SupModList)
+ if self.CommonDefines.SupArchList:
+ Extern.SetSupArchList(self.CommonDefines.SupArchList)
+ return Extern
+
+ def ToXml(self, Extern, Key):
+ if self.HelpText:
+ pass
+
+ NodeList = []
+ if Extern.GetEntryPoint():
+ NodeList.append(['EntryPoint', Extern.GetEntryPoint()])
+ if Extern.GetUnloadImage():
+ NodeList.append(['UnloadImage', Extern.GetUnloadImage()])
+ if Extern.GetConstructor():
+ NodeList.append(['Constructor', Extern.GetConstructor()])
+ if Extern.GetDestructor():
+ NodeList.append(['Destructor', Extern.GetDestructor()])
+ Root = CreateXmlElement('%s' % Key, '', NodeList, [])
+
+ return Root
+
+ def __str__(self):
+ Str = "EntryPoint = %s UnloadImage = %s Constructor = %s Destructor = %s %s" \
+ % (self.EntryPoint, self.UnloadImage, self.Constructor, self.Destructor, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + '\n\t' + str(Item)
+ return Str
+##
+# DepexXml
+#
+class DepexXml(object):
+ def __init__(self):
+ self.CommonDefines = CommonDefinesXml()
+ self.Expression = None
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ if not Item:
+ return None
+ self.CommonDefines.FromXml(Item, Key)
+ self.Expression = XmlElement(Item, '%s/Expression' % Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ Depex = DepexObject()
+ Depex.SetDepex(self.Expression)
+ Depex.SetModuleType(self.CommonDefines.SupModList)
+ Depex.SetSupArchList(self.CommonDefines.SupArchList)
+ Depex.SetFeatureFlag(self.CommonDefines.FeatureFlag)
+ Depex.SetHelpTextList(GetHelpTextList(self.HelpText))
+
+ return Depex
+
+ def ToXml(self, Depex, Key):
+ if self.HelpText:
+ pass
+ AttributeList = [['SupArchList', GetStringOfList(Depex.GetSupArchList())],
+ ['SupModList', Depex.GetModuleType()]]
+ NodeList = [['Expression', Depex.GetDepex()]]
+ if Depex.GetHelpText():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Depex.GetHelpText(), 'HelpText'))
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
+
+ def __str__(self):
+ Str = "Expression = %s" % (self.Expression)
+ for Item in self.HelpText:
+ Str = Str + '\n\t' + str(Item)
+ return Str
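+
+# XML shape sketch handled above (element names as used in FromXml/ToXml;
+# values illustrative):
+#   <PeiDepex SupArchList="IA32">
+#     <Expression>gExamplePpiGuid</Expression>
+#     <HelpText>...</HelpText>
+#   </PeiDepex>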
+
+##
+# BootModeXml
+#
+class BootModeXml(object):
+ def __init__(self):
+ self.SupportedBootModes = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.SupportedBootModes = \
+ XmlElement(Item, '%s/SupportedBootModes' % Key)
+ self.CommonDefines.FromXml(Item, Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ BootMode = BootModeObject()
+ BootMode.SetSupportedBootModes(self.SupportedBootModes)
+ BootMode.SetUsage(self.CommonDefines.Usage)
+ BootMode.SetHelpTextList(GetHelpTextList(self.HelpText))
+
+ return BootMode
+
+ def ToXml(self, BootMode, Key):
+ if self.HelpText:
+ pass
+ AttributeList = [['Usage', BootMode.GetUsage()], ]
+ NodeList = [['SupportedBootModes', BootMode.GetSupportedBootModes()]]
+ for Item in BootMode.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item, 'HelpText'))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = "SupportedBootModes = %s %s" % (self.SupportedBootModes, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + '\n\t' + str(Item)
+ return Str
+##
+# EventXml
+#
+class EventXml(object):
+ def __init__(self):
+ self.EventType = ''
+ self.Name = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.EventType = XmlAttribute(XmlNode(Item, '%s' % Key), 'EventType')
+ self.Name = XmlElement(Item, '%s' % Key)
+ self.CommonDefines.FromXml(Item, Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ Event = EventObject()
+ Event.SetEventType(self.EventType)
+ Event.SetUsage(self.CommonDefines.Usage)
+ Event.SetHelpTextList(GetHelpTextList(self.HelpText))
+
+ return Event
+
+ def ToXml(self, Event, Key):
+ if self.HelpText:
+ pass
+ AttributeList = [['EventType', Event.GetEventType()],
+ ['Usage', Event.GetUsage()],
+ ]
+ NodeList = []
+ for Item in Event.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item, 'HelpText'))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = "EventType = %s %s" % (self.EventType, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + '\n\t' + str(Item)
+ return Str
+##
+# HobXml
+#
+class HobXml(object):
+ def __init__(self):
+ self.HobType = ''
+ self.Name = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.HobType = XmlAttribute(XmlNode(Item, '%s' % Key), 'HobType')
+ self.Name = XmlElement(Item, '%s' % Key)
+ self.CommonDefines.FromXml(Item, Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ Hob = HobObject()
+ Hob.SetHobType(self.HobType)
+ Hob.SetSupArchList(self.CommonDefines.SupArchList)
+ Hob.SetUsage(self.CommonDefines.Usage)
+ Hob.SetHelpTextList(GetHelpTextList(self.HelpText))
+
+ return Hob
+
+ def ToXml(self, Hob, Key):
+ if self.Name:
+ pass
+ AttributeList = [['HobType', Hob.GetHobType()],
+ ['Usage', Hob.GetUsage()],
+ ['SupArchList', GetStringOfList(Hob.GetSupArchList())], ]
+ NodeList = []
+ for Item in Hob.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item, 'HelpText'))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = "HobType = %s %s" % (self.HobType, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + '\n\t' + str(Item)
+ return Str
+
+##
+# SourceFileXml
+#
+class SourceFileXml(object):
+ def __init__(self):
+ self.SourceFile = ''
+ self.ToolChainFamily = ''
+ self.FileType = ''
+ self.CommonDefines = CommonDefinesXml()
+
+ def FromXml(self, Item, Key):
+ self.ToolChainFamily = XmlAttribute(Item, 'Family')
+ self.SourceFile = XmlElement(Item, 'Filename')
+ self.CommonDefines.FromXml(Item, Key)
+
+ self.CommonDefines.FeatureFlag = ConvertNOTEQToNE(self.CommonDefines.FeatureFlag)
+
+ SourceFile = SourceFileObject()
+ SourceFile.SetSourceFile(self.SourceFile)
+ SourceFile.SetFamily(self.ToolChainFamily)
+ SourceFile.SetSupArchList(self.CommonDefines.SupArchList)
+ SourceFile.SetFeatureFlag(self.CommonDefines.FeatureFlag)
+
+ return SourceFile
+
+ def ToXml(self, SourceFile, Key):
+ if self.SourceFile:
+ pass
+ FeatureFlag = ConvertNEToNOTEQ(SourceFile.GetFeatureFlag())
+ AttributeList = [['SupArchList', GetStringOfList(SourceFile.GetSupArchList())],
+ ['Family', SourceFile.GetFamily()],
+ ['FeatureFlag', FeatureFlag], ]
+ Root = CreateXmlElement('%s' % Key, SourceFile.GetSourceFile(), [], AttributeList)
+ return Root
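+
+ # Element sketch produced by ToXml above (illustrative values):
+ #   <Filename SupArchList="IA32 X64" Family="MSFT"
+ #             FeatureFlag="...">Example.c</Filename>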
+
+##
+# ModulePropertyXml
+#
+class ModulePropertyXml(object):
+ def __init__(self):
+ self.CommonDefines = CommonDefinesXml()
+ self.ModuleType = ''
+ self.Path = ''
+ self.PcdIsDriver = ''
+ self.UefiSpecificationVersion = ''
+ self.PiSpecificationVersion = ''
+ self.SpecificationList = []
+ self.SpecificationVersion = ''
+ self.BootModes = []
+ self.Events = []
+ self.HOBs = []
+
+ def FromXml(self, Item, Key, Header=None):
+ self.CommonDefines.FromXml(Item, Key)
+ self.ModuleType = XmlElement(Item, '%s/ModuleType' % Key)
+ self.Path = XmlElement(Item, '%s/Path' % Key)
+ self.PcdIsDriver = XmlElement(Item, '%s/PcdIsDriver' % Key)
+ self.UefiSpecificationVersion = XmlElement(Item, '%s/UefiSpecificationVersion' % Key)
+ self.PiSpecificationVersion = XmlElement(Item, '%s/PiSpecificationVersion' % Key)
+ for SubItem in XmlList(Item, '%s/Specification' % Key):
+ Specification = XmlElement(SubItem, '/Specification')
+ Version = XmlAttribute(XmlNode(SubItem, '/Specification'), 'Version')
+ self.SpecificationList.append((Specification, Version))
+ for SubItem in XmlList(Item, '%s/BootMode' % Key):
+ Axml = BootModeXml()
+ BootMode = Axml.FromXml(SubItem, 'BootMode')
+ self.BootModes.append(BootMode)
+ for SubItem in XmlList(Item, '%s/Event' % Key):
+ Axml = EventXml()
+ Event = Axml.FromXml(SubItem, 'Event')
+ self.Events.append(Event)
+ for SubItem in XmlList(Item, '%s/HOB' % Key):
+ Axml = HobXml()
+ Hob = Axml.FromXml(SubItem, 'HOB')
+ self.HOBs.append(Hob)
+
+ if Header is None:
+ Header = ModuleObject()
+
+ Header.SetModuleType(self.ModuleType)
+ Header.SetSupArchList(self.CommonDefines.SupArchList)
+ Header.SetModulePath(self.Path)
+
+ Header.SetPcdIsDriver(self.PcdIsDriver)
+ Header.SetUefiSpecificationVersion(self.UefiSpecificationVersion)
+ Header.SetPiSpecificationVersion(self.PiSpecificationVersion)
+ Header.SetSpecList(self.SpecificationList)
+
+ return Header, self.BootModes, self.Events, self.HOBs
+
+
+ def ToXml(self, Header, BootModes, Events, Hobs, Key):
+ if self.ModuleType:
+ pass
+ AttributeList = [['SupArchList', GetStringOfList(Header.GetSupArchList())], ]
+
+ NodeList = [['ModuleType', Header.GetModuleType()],
+ ['Path', Header.GetModulePath()],
+ ['PcdIsDriver', Header.GetPcdIsDriver()],
+ ['UefiSpecificationVersion', Header.GetUefiSpecificationVersion()],
+ ['PiSpecificationVersion', Header.GetPiSpecificationVersion()],
+ ]
+ for Item in Header.GetSpecList():
+ Spec, Version = Item
+ SpecElem = CreateXmlElement('Specification', Spec, [], [['Version', Version]])
+ NodeList.append(SpecElem)
+
+ for Item in BootModes:
+ Tmp = BootModeXml()
+ NodeList.append(Tmp.ToXml(Item, 'BootMode'))
+ for Item in Events:
+ Tmp = EventXml()
+ NodeList.append(Tmp.ToXml(Item, 'Event'))
+ for Item in Hobs:
+ Tmp = HobXml()
+ NodeList.append(Tmp.ToXml(Item, 'HOB'))
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = "ModuleType = %s Path = %s PcdIsDriver = %s UefiSpecificationVersion = %s PiSpecificationVersion = %s \
+ Specification = %s SpecificationVersion = %s %s" % \
+ (self.ModuleType, self.Path, self.PcdIsDriver, \
+ self.UefiSpecificationVersion, self.PiSpecificationVersion, \
+ self.SpecificationList, self.SpecificationVersion, self.CommonDefines)
+ for Item in self.BootModes:
+ Str = Str + '\n\t' + str(Item)
+ for Item in self.Events:
+ Str = Str + '\n\t' + str(Item)
+ for Item in self.HOBs:
+ Str = Str + '\n\t' + str(Item)
+ return Str
+
+##
+# ModuleXml
+#
+class ModuleSurfaceAreaXml(object):
+ def __init__(self, Package=''):
+ self.Module = None
+ #
+ # indicates the package in which this module resides
+ #
+ self.Package = Package
+
+ def FromXml2(self, Item, Module):
+ if self.Module:
+ pass
+ #
+ # PeiDepex
+ #
+ PeiDepexList = []
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/PeiDepex'):
+ Tmp = DepexXml()
+ Depex = Tmp.FromXml(XmlNode(SubItem, 'PeiDepex'), 'PeiDepex')
+ PeiDepexList.append(Depex)
+ Module.SetPeiDepex(PeiDepexList)
+
+ #
+ # DxeDepex
+ #
+ DxeDepexList = []
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/DxeDepex'):
+ Tmp = DepexXml()
+ Depex = Tmp.FromXml(XmlNode(SubItem, 'DxeDepex'), 'DxeDepex')
+ DxeDepexList.append(Depex)
+ Module.SetDxeDepex(DxeDepexList)
+
+ #
+ # SmmDepex
+ #
+ SmmDepexList = []
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/SmmDepex'):
+ Tmp = DepexXml()
+ Depex = Tmp.FromXml(XmlNode(SubItem, 'SmmDepex'), 'SmmDepex')
+ SmmDepexList.append(Depex)
+ Module.SetSmmDepex(SmmDepexList)
+
+ #
+ # MiscellaneousFile
+ #
+ Tmp = MiscellaneousFileXml()
+ MiscFileList = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')
+ if MiscFileList:
+ Module.SetMiscFileList([MiscFileList])
+ else:
+ Module.SetMiscFileList([])
+
+ #
+ # UserExtensions
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/UserExtensions'):
+ Tmp = UserExtensionsXml()
+ UserExtension = Tmp.FromXml(SubItem, 'UserExtensions')
+ Module.SetUserExtensionList(Module.GetUserExtensionList() + [UserExtension])
+
+ return Module
+
+ def FromXml(self, Item, Key, IsStandAlongModule=False):
+ IsBinaryModule = XmlAttribute(Item, 'BinaryModule')
+ #
+ # Header
+ #
+ Tmp = HeaderXml()
+ Module = Tmp.FromXml(XmlNode(Item, '/%s/Header' % Key), 'Header', True, IsStandAlongModule)
+ Module.SetBinaryModule(IsBinaryModule)
+
+ if IsBinaryModule:
+ GlobalData.gIS_BINARY_INF = True
+
+ #
+ # ModuleProperties
+ #
+ Tmp = ModulePropertyXml()
+ (Module, BootModes, Events, HOBs) = \
+ Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ModuleProperties'), 'ModuleProperties', Module)
+ Module.SetBootModeList(BootModes)
+ Module.SetEventList(Events)
+ Module.SetHobList(HOBs)
+ #
+ # ClonedFrom
+ #
+ Tmp = ClonedFromXml()
+ ClonedFrom = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ClonedFrom'), 'ClonedFrom')
+ if ClonedFrom:
+ Module.SetClonedFrom(ClonedFrom)
+
+ #
+ # LibraryClass
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
+ Tmp = LibraryClassXml()
+ LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
+ Module.SetLibraryClassList(Module.GetLibraryClassList() + [LibraryClass])
+
+ if XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions') and \
+ not XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
+ Module.SetLibraryClassList([None])
+
+ #
+ # SourceFiles
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename'):
+ Tmp = SourceFileXml()
+ SourceFile = Tmp.FromXml(SubItem, 'Filename')
+ Module.SetSourceFileList(Module.GetSourceFileList() + [SourceFile])
+
+ if XmlList(Item, '/ModuleSurfaceArea/SourceFiles') and \
+ not XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename') :
+ Module.SetSourceFileList([None])
+
+ #
+ # BinaryFile
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile'):
+ Tmp = BinaryFileXml()
+ BinaryFile = Tmp.FromXml(SubItem, 'BinaryFile')
+ Module.SetBinaryFileList(Module.GetBinaryFileList() + [BinaryFile])
+
+ if XmlList(Item, '/ModuleSurfaceArea/BinaryFiles') and \
+ not XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile') :
+ Module.SetBinaryFileList([None])
+ #
+ # PackageDependencies
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
+ Tmp = PackageXml()
+ PackageDependency = Tmp.FromXml(SubItem, 'Package')
+ Module.SetPackageDependencyList(Module.GetPackageDependencyList() + [PackageDependency])
+
+ if XmlList(Item, '/ModuleSurfaceArea/PackageDependencies') and \
+ not XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
+ Module.SetPackageDependencyList([None])
+
+ #
+ # Guid
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
+ Tmp = GuidXml('Module')
+ GuidProtocolPpi = Tmp.FromXml(SubItem, 'GuidCName')
+ Module.SetGuidList(Module.GetGuidList() + [GuidProtocolPpi])
+
+ if XmlList(Item, '/ModuleSurfaceArea/Guids') and not XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
+ Module.SetGuidList([None])
+
+ #
+ # Protocol
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
+ Tmp = ProtocolXml('Module')
+ GuidProtocolPpi = Tmp.FromXml(SubItem, 'Protocol')
+ Module.SetProtocolList(Module.GetProtocolList() + [GuidProtocolPpi])
+
+ if XmlList(Item, '/ModuleSurfaceArea/Protocols') and not XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
+ Module.SetProtocolList([None])
+
+ #
+ # Ppi
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
+ Tmp = PpiXml('Module')
+ GuidProtocolPpi = Tmp.FromXml(SubItem, 'Ppi')
+ Module.SetPpiList(Module.GetPpiList() + [GuidProtocolPpi])
+
+ if XmlList(Item, '/ModuleSurfaceArea/PPIs') and not XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
+ Module.SetPpiList([None])
+
+ #
+ # Extern
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
+ Tmp = ExternXml()
+ Extern = Tmp.FromXml(SubItem, 'Extern')
+ Module.SetExternList(Module.GetExternList() + [Extern])
+
+ if XmlList(Item, '/ModuleSurfaceArea/Externs') and not XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
+ Module.SetExternList([None])
+
+ if not Module.GetBinaryModule():
+ #
+ # PcdCoded
+ #
+ for SubItem in XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
+ Tmp = PcdEntryXml()
+ PcdEntry = Tmp.FromXml3(SubItem, 'PcdEntry')
+ Module.SetPcdList(Module.GetPcdList() + [PcdEntry])
+
+ if XmlList(Item, '/ModuleSurfaceArea/PcdCoded') and \
+ not XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
+ Module.SetPcdList([None])
+
+ Module = self.FromXml2(Item, Module)
+ #
+ # return the module object
+ #
+ self.Module = Module
+ return self.Module
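+
+ # Usage sketch (illustrative; 'Pkg' is a hypothetical parsed .PKG DOM):
+ #   Tmp = ModuleSurfaceAreaXml()
+ #   Module = Tmp.FromXml(XmlNode(Pkg, '/ModuleSurfaceArea'), 'ModuleSurfaceArea')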
+
+ def ToXml(self, Module):
+ if self.Package:
+ pass
+ #
+ # Create root node of module surface area
+ #
+ DomModule = minidom.Document().createElement('ModuleSurfaceArea')
+ if Module.GetBinaryModule():
+ DomModule.setAttribute('BinaryModule', 'true')
+
+ #
+ # Header
+ #
+ Tmp = HeaderXml()
+ DomModule.appendChild(Tmp.ToXml(Module, 'Header'))
+ #
+ # ModuleProperties
+ #
+ Tmp = ModulePropertyXml()
+ DomModule.appendChild(Tmp.ToXml(Module, Module.GetBootModeList(), Module.GetEventList(), Module.GetHobList(), \
+ 'ModuleProperties'))
+ #
+ # ClonedFrom
+ #
+ Tmp = ClonedFromXml()
+ if Module.GetClonedFrom():
+ DomModule.appendChild(Tmp.ToXml(Module.GetClonedFrom(), 'ClonedFrom'))
+ #
+ # LibraryClass
+ #
+ LibraryClassNode = CreateXmlElement('LibraryClassDefinitions', '', [], [])
+ for LibraryClass in Module.GetLibraryClassList():
+ Tmp = LibraryClassXml()
+ LibraryClassNode.appendChild(Tmp.ToXml2(LibraryClass, 'LibraryClass'))
+ DomModule.appendChild(LibraryClassNode)
+ #
+ # SourceFile
+ #
+ SourceFileNode = CreateXmlElement('SourceFiles', '', [], [])
+ for SourceFile in Module.GetSourceFileList():
+ Tmp = SourceFileXml()
+ SourceFileNode.appendChild(Tmp.ToXml(SourceFile, 'Filename'))
+ DomModule.appendChild(SourceFileNode)
+ #
+ # BinaryFile
+ #
+ BinaryFileNode = CreateXmlElement('BinaryFiles', '', [], [])
+ for BinaryFile in Module.GetBinaryFileList():
+ Tmp = BinaryFileXml()
+ BinaryFileNode.appendChild(Tmp.ToXml(BinaryFile, 'BinaryFile'))
+ DomModule.appendChild(BinaryFileNode)
+ #
+ # PackageDependencies
+ #
+ PackageDependencyNode = CreateXmlElement('PackageDependencies', '', [], [])
+ for PackageDependency in Module.GetPackageDependencyList():
+ Tmp = PackageXml()
+ PackageDependencyNode.appendChild(Tmp.ToXml(PackageDependency, 'Package'))
+ DomModule.appendChild(PackageDependencyNode)
+
+ #
+ # Guid
+ #
+ GuidProtocolPpiNode = CreateXmlElement('Guids', '', [], [])
+ for GuidProtocolPpi in Module.GetGuidList():
+ Tmp = GuidXml('Module')
+ GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'GuidCName'))
+ DomModule.appendChild(GuidProtocolPpiNode)
+
+ #
+ # Protocol
+ #
+ GuidProtocolPpiNode = CreateXmlElement('Protocols', '', [], [])
+ for GuidProtocolPpi in Module.GetProtocolList():
+ Tmp = ProtocolXml('Module')
+ GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Protocol'))
+ DomModule.appendChild(GuidProtocolPpiNode)
+
+ #
+ # Ppi
+ #
+ GuidProtocolPpiNode = CreateXmlElement('PPIs', '', [], [])
+ for GuidProtocolPpi in Module.GetPpiList():
+ Tmp = PpiXml('Module')
+ GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Ppi'))
+ DomModule.appendChild(GuidProtocolPpiNode)
+ #
+ # Extern
+ #
+ ExternNode = CreateXmlElement('Externs', '', [], [])
+ for Extern in Module.GetExternList():
+ Tmp = ExternXml()
+ ExternNode.appendChild(Tmp.ToXml(Extern, 'Extern'))
+ DomModule.appendChild(ExternNode)
+ #
+ # PcdCoded
+ #
+ PcdEntryNode = CreateXmlElement('PcdCoded', '', [], [])
+ for PcdEntry in Module.GetPcdList():
+ Tmp = PcdEntryXml()
+ PcdEntryNode.appendChild(Tmp.ToXml3(PcdEntry, 'PcdEntry'))
+ DomModule.appendChild(PcdEntryNode)
+
+ #
+ # PeiDepex
+ #
+ if Module.GetPeiDepex():
+ for Item in Module.GetPeiDepex():
+ Tmp = DepexXml()
+ DomModule.appendChild(Tmp.ToXml(Item, 'PeiDepex'))
+
+ #
+ # DxeDepex
+ #
+ if Module.GetDxeDepex():
+ for Item in Module.GetDxeDepex():
+ Tmp = DepexXml()
+ DomModule.appendChild(Tmp.ToXml(Item, 'DxeDepex'))
+
+ #
+ # SmmDepex
+ #
+ if Module.GetSmmDepex():
+ for Item in Module.GetSmmDepex():
+ Tmp = DepexXml()
+ DomModule.appendChild(Tmp.ToXml(Item, 'SmmDepex'))
+
+ #
+ # MiscellaneousFile
+ #
+ if Module.GetMiscFileList():
+ Tmp = MiscellaneousFileXml()
+ DomModule.appendChild(Tmp.ToXml(Module.GetMiscFileList()[0], 'MiscellaneousFiles'))
+ #
+ # UserExtensions
+ #
+ if Module.GetUserExtensionList():
+ for UserExtension in Module.GetUserExtensionList():
+ Tmp = UserExtensionsXml()
+ DomModule.appendChild(Tmp.ToXml(UserExtension, 'UserExtensions'))
+
+ return DomModule
+
+##
+# BuildFlagXml is used to generate BuildFlags for <AsBuilt>
+#
+class BuildFlagXml(object):
+ def __init__(self):
+ self.Target = ''
+ self.TagName = ''
+ self.Family = ''
+ self.AsBuiltFlags = ''
+
+ def FromXml(self, Item, Key):
+ self.Target = XmlElement(Item, '%s/Target' % Key)
+ self.TagName = XmlElement(Item, '%s/TagName' % Key)
+ self.Family = XmlElement(Item, '%s/Family' % Key)
+
+ BuildFlag = BinaryBuildFlagObject()
+
+ BuildFlag.SetTarget(self.Target)
+ BuildFlag.SetTagName(self.TagName)
+ BuildFlag.SetFamily(self.Family)
+
+ return BuildFlag
+
+ #
+ # For as-built INF usage
+ #
+ def FromXml2(self, Item, Key):
+ self.AsBuiltFlags = XmlElement(Item, '%s' % Key)
+
+ LineList = GetSplitValueList(self.AsBuiltFlags, '\n')
+ ReturnLine = ''
+ Count = 0
+ for Line in LineList:
+ if Count == 0:
+ ReturnLine = "# " + Line
+ else:
+ ReturnLine = ReturnLine + '\n' + '# ' + Line
+ Count += 1
+
+ BuildFlag = BinaryBuildFlagObject()
+ BuildFlag.SetAsBuiltOptionFlags(ReturnLine)
+
+ return BuildFlag
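+
+ # Transformation sketch (illustrative): FromXml2 turns a two-line flag
+ # text such as "/nologo /W4\n-DFOO" into "# /nologo /W4\n# -DFOO",
+ # ready to be emitted as comment lines in an as-built INF.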
+
+ def ToXml(self, BuildFlag, Key):
+ if self.Target:
+ pass
+ AttributeList = []
+ NodeList = []
+ NodeList.append(['BuildFlags', BuildFlag])
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+ return Root
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py
new file mode 100755
index 00000000..9cff02d3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py
@@ -0,0 +1,402 @@
+## @file
+# This file is used to parse the package surface area of a .PKG file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+PackageSurfaceAreaXml
+'''
+from xml.dom import minidom
+
+from Library.StringUtils import GetStringOfList
+from Library.Xml.XmlRoutines import XmlElement
+from Library.Xml.XmlRoutines import XmlNode
+from Library.Xml.XmlRoutines import XmlList
+from Library.Xml.XmlRoutines import CreateXmlElement
+from Object.POM.CommonObject import IncludeObject
+from Object.POM.CommonObject import TextObject
+from Object.POM.PackageObject import PackageObject
+from Xml.CommonXml import ClonedFromXml
+from Xml.CommonXml import PackageHeaderXml
+from Xml.CommonXml import HelpTextXml
+from Xml.CommonXml import CommonDefinesXml
+from Xml.CommonXml import LibraryClassXml
+from Xml.CommonXml import UserExtensionsXml
+from Xml.CommonXml import MiscellaneousFileXml
+from Xml.GuidProtocolPpiXml import GuidXml
+from Xml.GuidProtocolPpiXml import ProtocolXml
+from Xml.GuidProtocolPpiXml import PpiXml
+from Xml.ModuleSurfaceAreaXml import ModuleSurfaceAreaXml
+from Xml.PcdXml import PcdEntryXml
+
+##
+# IndustryStandardHeaderXml
+#
+class IndustryStandardHeaderXml(object):
+ def __init__(self):
+ self.HeaderFile = ''
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ Include = IncludeObject()
+ Include.SetFilePath(self.HeaderFile)
+ HelpTxt = TextObject()
+ HelpTxt.SetString(self.HelpText)
+ Include.SetHelpText(HelpTxt)
+
+ return Include
+
+ def ToXml(self, IndustryStandardHeader, Key):
+ if self.HeaderFile:
+ pass
+ AttributeList = []
+ NodeList = [['HeaderFile', IndustryStandardHeader.GetFilePath()]]
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ def __str__(self):
+ Str = "HeaderFile = %s" % (self.HeaderFile)
+ for Item in self.HelpText:
+ Str = Str + "\n\t" + str(Item)
+ return Str
+##
+# PackageIncludeHeaderXml
+#
+class PackageIncludeHeaderXml(object):
+ def __init__(self):
+ self.HeaderFile = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.HelpText = []
+
+ def FromXml(self, Item, Key):
+ self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
+ self.CommonDefines.FromXml(XmlNode(Item, '%s/HeaderFile' % Key), 'HeaderFile')
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+
+ Include = IncludeObject()
+ Include.SetFilePath(self.HeaderFile)
+ Include.SetSupArchList(self.CommonDefines.SupArchList)
+ HelpTxt = TextObject()
+ HelpTxt.SetString(self.HelpText)
+ Include.SetHelpText(HelpTxt)
+
+ return Include
+
+ def ToXml(self, PackageIncludeHeader, Key):
+ if self.HeaderFile:
+ pass
+ AttributeList = [['SupArchList', GetStringOfList(PackageIncludeHeader.GetSupArchList())], \
+ ['SupModList', GetStringOfList(PackageIncludeHeader.GetSupModuleList())], ]
+
+ HeaderFileNode = CreateXmlElement('HeaderFile', PackageIncludeHeader.FilePath, [], AttributeList)
+
+ NodeList = [HeaderFileNode]
+ for Item in PackageIncludeHeader.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, [])
+
+ return Root
+
+ def __str__(self):
+ Str = "HeaderFile = %s\n\t%s" % (self.HeaderFile, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + "\n\t" + str(Item)
+ return Str
+
+##
+# PcdCheckXml
+#
+class PcdCheckXml(object):
+ def __init__(self):
+ self.PcdCheck = ''
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ self.PcdCheck = XmlElement(Item, 'PcdCheck')
+
+ return self.PcdCheck
+
+ def ToXml(self, PcdCheck, Key):
+ if self.PcdCheck:
+ pass
+ Root = CreateXmlElement('%s' % Key, PcdCheck, [], [])
+ return Root
+
+ def __str__(self):
+ return "PcdCheck = %s" % (self.PcdCheck)
+
+##
+# PackageSurfaceAreaXml
+#
+class PackageSurfaceAreaXml(object):
+ def __init__(self):
+ self.Package = None
+
+ def FromXml(self, Item, Key):
+ if Key:
+ pass
+ #
+ # Create a package object
+ #
+ Package = PackageObject()
+ #
+ # Header
+ #
+ Tmp = PackageHeaderXml()
+ Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/Header'), 'Header', Package)
+ #
+ # ClonedFrom
+ #
+ Tmp = ClonedFromXml()
+ if XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'):
+ ClonedFrom = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'), 'ClonedFrom')
+ Package.SetClonedFromList([ClonedFrom])
+ #
+ # LibraryClass
+ #
+
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass'):
+ Tmp = LibraryClassXml()
+ LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
+ Package.SetLibraryClassList(Package.GetLibraryClassList() + [LibraryClass])
+
+ if XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations') and \
+ not XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass'):
+ Package.SetLibraryClassList([None])
+
+ #
+ # IndustryStandardHeader
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes/IndustryStandardHeader'):
+ Tmp = IndustryStandardHeaderXml()
+ Include = Tmp.FromXml(SubItem, 'IndustryStandardHeader')
+ Package.SetStandardIncludeFileList(Package.GetStandardIncludeFileList() + [Include])
+
+ if XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes') and \
+ not XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes/IndustryStandardHeader'):
+ Package.SetStandardIncludeFileList([None])
+
+
+ #
+ # PackageHeader
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/PackageIncludes/PackageHeader'):
+ Tmp = PackageIncludeHeaderXml()
+ Include = Tmp.FromXml(SubItem, 'PackageHeader')
+ Package.SetPackageIncludeFileList(Package.GetPackageIncludeFileList() + [Include])
+
+ if XmlList(Item, '/PackageSurfaceArea/PackageIncludes') and not \
+ XmlList(Item, '/PackageSurfaceArea/PackageIncludes/PackageHeader'):
+ Package.SetPackageIncludeFileList([None])
+
+ #
+ # Guid
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/GuidDeclarations/Entry'):
+ Tmp = GuidXml('Package')
+ GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
+ Package.SetGuidList(Package.GetGuidList() + [GuidProtocolPpi])
+
+ if XmlList(Item, '/PackageSurfaceArea/GuidDeclarations') and not \
+ XmlList(Item, '/PackageSurfaceArea/GuidDeclarations/Entry'):
+ Package.SetGuidList([None])
+
+ #
+ # Protocol
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations/Entry'):
+ Tmp = ProtocolXml('Package')
+ GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
+ Package.SetProtocolList(Package.GetProtocolList() + [GuidProtocolPpi])
+
+ if XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations') and not \
+ XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations/Entry'):
+ Package.SetProtocolList([None])
+
+ #
+ # Ppi
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/PpiDeclarations/Entry'):
+ Tmp = PpiXml('Package')
+ GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
+ Package.SetPpiList(Package.GetPpiList() + [GuidProtocolPpi])
+
+ if XmlList(Item, '/PackageSurfaceArea/PpiDeclarations') and not \
+ XmlList(Item, '/PackageSurfaceArea/PpiDeclarations/Entry'):
+ Package.SetPpiList([None])
+
+ #
+ # PcdEntry
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
+ Tmp = PcdEntryXml()
+ PcdEntry = Tmp.FromXml2(SubItem, 'PcdEntry')
+ Package.SetPcdList(Package.GetPcdList() + [PcdEntry])
+
+ #
+ # Get PcdErrorCommentDict from PcdError in PcdEntry Node
+ #
+ for PcdErrorObj in PcdEntry.GetPcdErrorsList():
+ PcdErrorMessageList = PcdErrorObj.GetErrorMessageList()
+ if PcdErrorMessageList:
+ Package.PcdErrorCommentDict[(PcdEntry.GetTokenSpaceGuidCName(), PcdErrorObj.GetErrorNumber())] = \
+ PcdErrorMessageList
+
+
+ if XmlList(Item, '/PackageSurfaceArea/PcdDeclarations') and not \
+ XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
+ Package.SetPcdList([None])
+
+ #
+ # PcdCheck
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdRelationshipChecks/PcdCheck'):
+ Tmp = PcdCheckXml()
+ PcdCheck = Tmp.FromXml(SubItem, 'PcdCheck')
+ Package.PcdChecks.append(PcdCheck)
+
+ #
+ # Modules
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/Modules/ModuleSurfaceArea'):
+ Tmp = ModuleSurfaceAreaXml()
+ Module = Tmp.FromXml(SubItem, 'ModuleSurfaceArea')
+ ModuleDictKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())
+ Package.ModuleDict[ModuleDictKey] = Module
+ #
+ # MiscellaneousFile
+ #
+ Tmp = MiscellaneousFileXml()
+ MiscFileList = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')
+ if MiscFileList:
+ Package.SetMiscFileList([MiscFileList])
+ else:
+ Package.SetMiscFileList([])
+
+ #
+ # UserExtensions
+ #
+ for SubItem in XmlList(Item, '/PackageSurfaceArea/UserExtensions'):
+ Tmp = UserExtensionsXml()
+ UserExtension = Tmp.FromXml(SubItem, 'UserExtensions')
+ Package.UserExtensionList.append(UserExtension)
+
+ self.Package = Package
+ return self.Package
+
+ def ToXml(self, Package):
+ if self.Package:
+ pass
+ #
+ # Create PackageSurfaceArea node
+ #
+ DomPackage = minidom.Document().createElement('PackageSurfaceArea')
+ #
+ # Header
+ #
+ Tmp = PackageHeaderXml()
+ DomPackage.appendChild(Tmp.ToXml(Package, 'Header'))
+ #
+ # ClonedFrom
+ #
+ Tmp = ClonedFromXml()
+ if Package.GetClonedFromList() != []:
+ DomPackage.appendChild(Tmp.ToXml(Package.GetClonedFromList()[0], 'ClonedFrom'))
+ #
+ # LibraryClass
+ #
+ LibraryClassNode = CreateXmlElement('LibraryClassDeclarations', '', [], [])
+ for LibraryClass in Package.GetLibraryClassList():
+ Tmp = LibraryClassXml()
+ LibraryClassNode.appendChild(Tmp.ToXml(LibraryClass, 'LibraryClass'))
+ DomPackage.appendChild(LibraryClassNode)
+ #
+ # IndustryStandardHeader
+ #
+ IndustryStandardHeaderNode = CreateXmlElement('IndustryStandardIncludes', '', [], [])
+ for Include in Package.GetStandardIncludeFileList():
+ Tmp = IndustryStandardHeaderXml()
+ IndustryStandardHeaderNode.appendChild(Tmp.ToXml(Include, 'IndustryStandardHeader'))
+ DomPackage.appendChild(IndustryStandardHeaderNode)
+ #
+ # PackageHeader
+ #
+ PackageIncludeHeaderNode = CreateXmlElement('PackageIncludes', '', [], [])
+ for Include in Package.GetPackageIncludeFileList():
+ Tmp = PackageIncludeHeaderXml()
+ PackageIncludeHeaderNode.appendChild(Tmp.ToXml(Include, 'PackageHeader'))
+ DomPackage.appendChild(PackageIncludeHeaderNode)
+ ModuleNode = CreateXmlElement('Modules', '', [], [])
+ for Module in Package.GetModuleDict().values():
+ Tmp = ModuleSurfaceAreaXml()
+ ModuleNode.appendChild(Tmp.ToXml(Module))
+ DomPackage.appendChild(ModuleNode)
+ #
+ # Guid
+ #
+ GuidProtocolPpiNode = CreateXmlElement('GuidDeclarations', '', [], [])
+ for GuidProtocolPpi in Package.GetGuidList():
+ Tmp = GuidXml('Package')
+ GuidProtocolPpiNode.appendChild(Tmp.ToXml\
+ (GuidProtocolPpi, 'Entry'))
+ DomPackage.appendChild(GuidProtocolPpiNode)
+ #
+ # Protocol
+ #
+ GuidProtocolPpiNode = \
+ CreateXmlElement('ProtocolDeclarations', '', [], [])
+ for GuidProtocolPpi in Package.GetProtocolList():
+ Tmp = ProtocolXml('Package')
+ GuidProtocolPpiNode.appendChild\
+ (Tmp.ToXml(GuidProtocolPpi, 'Entry'))
+ DomPackage.appendChild(GuidProtocolPpiNode)
+ #
+ # Ppi
+ #
+ GuidProtocolPpiNode = CreateXmlElement('PpiDeclarations', '', [], [])
+ for GuidProtocolPpi in Package.GetPpiList():
+ Tmp = PpiXml('Package')
+ GuidProtocolPpiNode.appendChild\
+ (Tmp.ToXml(GuidProtocolPpi, 'Entry'))
+ DomPackage.appendChild(GuidProtocolPpiNode)
+ #
+ # PcdEntry
+ #
+ PcdEntryNode = CreateXmlElement('PcdDeclarations', '', [], [])
+ for PcdEntry in Package.GetPcdList():
+ Tmp = PcdEntryXml()
+ PcdEntryNode.appendChild(Tmp.ToXml2(PcdEntry, 'PcdEntry'))
+ DomPackage.appendChild(PcdEntryNode)
+
+ #
+ # MiscellaneousFile
+ #
+ Tmp = MiscellaneousFileXml()
+ if Package.GetMiscFileList():
+ DomPackage.appendChild(Tmp.ToXml(Package.GetMiscFileList()[0], 'MiscellaneousFiles'))
+
+ #
+ # UserExtensions
+ #
+ if Package.GetUserExtensionList():
+ for UserExtension in Package.GetUserExtensionList():
+ Tmp = UserExtensionsXml()
+ DomPackage.appendChild(Tmp.ToXml(UserExtension, 'UserExtensions'))
+
+ return DomPackage
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PcdXml.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PcdXml.py
new file mode 100755
index 00000000..3fa0766a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/PcdXml.py
@@ -0,0 +1,555 @@
+## @file
+# This file is used to parse the PCD XML sections of a .PKG file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+PcdXml
+'''
+
+##
+# Import Modules
+#
+
+from Library.Xml.XmlRoutines import XmlElement
+from Library.Xml.XmlRoutines import XmlAttribute
+from Library.Xml.XmlRoutines import XmlNode
+from Library.Xml.XmlRoutines import CreateXmlElement
+from Library.Xml.XmlRoutines import XmlList
+from Library.StringUtils import GetStringOfList
+from Library.StringUtils import ConvertNEToNOTEQ
+from Library.StringUtils import ConvertNOTEQToNE
+from Library import GlobalData
+from Object.POM.CommonObject import PcdObject
+from Object.POM.CommonObject import PcdErrorObject
+from Xml.CommonXml import HelpTextXml
+from Xml.CommonXml import PromptXml
+from Xml.CommonXml import CommonDefinesXml
+from Xml.XmlParserMisc import GetHelpTextList
+from Xml.XmlParserMisc import GetPromptList
+import re
+
+##
+# PcdErrorXml
+#
+class PcdErrorXml(object):
+ def __init__(self):
+ self.ValidValueList = ''
+ self.ValidValueListLang = ''
+ self.ValidValueRange = ''
+ self.Expression = ''
+ self.ErrorNumber = ''
+ self.ErrorMessage = []
+
+ def FromXml(self, Item, Key):
+ self.ValidValueList = XmlElement(Item, '%s/ValidValueList' % Key)
+ self.ValidValueListLang = \
+ XmlAttribute(XmlNode(Item, '%s/ValidValueList' % Key), 'Lang')
+ self.ValidValueRange = self.TransferValidEpxr2ValidRange(XmlElement(Item, '%s/ValidValueRange' % Key))
+ self.Expression = XmlElement(Item, '%s/Expression' % Key)
+ self.ErrorNumber = XmlElement(Item, '%s/ErrorNumber' % Key)
+ for ErrMsg in XmlList(Item, '%s/ErrorMessage' % Key):
+ ErrorMessageString = XmlElement(ErrMsg, 'ErrorMessage')
+ ErrorMessageLang = \
+ XmlAttribute(XmlNode(ErrMsg, 'ErrorMessage'), 'Lang')
+ self.ErrorMessage.append((ErrorMessageLang, ErrorMessageString))
+
+ Error = PcdErrorObject()
+ Error.SetValidValue(self.ValidValueList)
+ Error.SetValidValueLang(self.ValidValueListLang)
+ Error.SetValidValueRange(self.ValidValueRange)
+ Error.SetExpression(self.Expression)
+ Error.SetErrorNumber(self.ErrorNumber)
+ Error.SetErrorMessageList(self.ErrorMessage)
+
+ return Error
+
+ def ToXml(self, PcdError, Key):
+ if self.Expression:
+ pass
+ AttributeList = []
+ NodeList = []
+ if PcdError.GetValidValue():
+ Element1 = \
+ CreateXmlElement('ValidValueList', PcdError.GetValidValue(), [], \
+ [['Lang', PcdError.GetValidValueLang()]])
+ NodeList.append(Element1)
+ if PcdError.GetValidValueRange():
+ TransferedRangeStr = self.TransferValidRange2Expr(PcdError.GetTokenSpaceGuidCName(),
+ PcdError.GetCName(),
+ PcdError.GetValidValueRange())
+ Element1 = \
+ CreateXmlElement('ValidValueRange', \
+ TransferedRangeStr, [], [])
+ NodeList.append(Element1)
+ if PcdError.GetExpression():
+ NodeList.append(['Expression', PcdError.GetExpression()])
+ if PcdError.GetErrorNumber():
+ NodeList.append(['ErrorNumber', PcdError.GetErrorNumber()])
+ for Item in PcdError.GetErrorMessageList():
+ Element = \
+ CreateXmlElement('ErrorMessage', Item[1], [], [['Lang', Item[0]]])
+ NodeList.append(Element)
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
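+ ##
+ # Convert a stored ValidValueRange string back into expression form for XML
+ # output. As an illustrative example (the PCD name is hypothetical), the
+ # range '0x10 - 0x20' for Guid.PcdName becomes
+ # '(Guid.PcdName GE 0x10) AND (Guid.PcdName LE 0x20)', while a relational
+ # form such as 'LT 5' becomes ' Guid.PcdName LT 5'.
+ #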
+ def TransferValidRange2Expr(self, TokenSpaceGuidCName, CName, ValidRange):
+ if self.Expression:
+ pass
+ INT_RANGE_PATTERN1 = r'[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
+ INT_RANGE_PATTERN2 = r'[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
+ HEX_RANGE_PATTERN1 = \
+ r'[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
+ HEX_RANGE_PATTERN2 = r'[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][a-fA-F0-9]+[\t\s]*'
+ IntMatch1 = re.compile(INT_RANGE_PATTERN1)
+ IntMatch2 = re.compile(INT_RANGE_PATTERN2)
+ HexMatch1 = re.compile(HEX_RANGE_PATTERN1)
+ HexMatch2 = re.compile(HEX_RANGE_PATTERN2)
+ PcdName = '.'.join([TokenSpaceGuidCName, CName])
+ HexMatchedList = []
+ IntMatchedList = []
+ #
+ # Convert HEX2 format range
+ #
+ if HexMatch2:
+ for MatchObj in HexMatch2.finditer(ValidRange):
+ MatchStr = MatchObj.group()
+ TransferedRangeStr = ' '.join(['', PcdName, MatchStr.strip()])
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+ #
+ # Convert INT2 format range
+ #
+ if IntMatch2:
+ for MatchObj in IntMatch2.finditer(ValidRange):
+ MatchStr = MatchObj.group()
+ TransferedRangeStr = ' '.join(['', PcdName, MatchStr.strip()])
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+ #
+ # Convert HEX1 format range
+ #
+ if HexMatch1:
+ HexMatchedList += HexMatch1.findall(ValidRange)
+
+ for MatchStr in HexMatchedList:
+ RangeItemList = MatchStr.strip().split('-')
+ TransferedRangeStr = '(%s GE %s) AND (%s LE %s)' % \
+ (PcdName, RangeItemList[0].strip(), PcdName, RangeItemList[1].strip())
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+ #
+ # Convert INT1 format range
+ #
+ if IntMatch1:
+ IntMatchedList += IntMatch1.findall(ValidRange)
+
+ for MatchStr in IntMatchedList:
+ RangeItemList = MatchStr.strip().split('-')
+ TransferedRangeStr = '(%s GE %s) AND (%s LE %s)' % \
+ (PcdName, RangeItemList[0].strip(), PcdName, RangeItemList[1].strip())
+ ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
+
+ return ValidRange
+
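+ ##
+ # Reverse of TransferValidRange2Expr: collapse the expression form read from
+ # XML back into a range string. For example (hypothetical PCD name),
+ # '(Guid.PcdName GE 0x10) AND (Guid.PcdName LE 0x20)' reduces to
+ # ' 0x10 - 0x20', and ' Guid.PcdName LT 5' reduces to ' LT 5'.
+ #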
+ def TransferValidEpxr2ValidRange(self, ValidRangeExpr):
+ if self.Expression:
+ pass
+
+ PCD_PATTERN = \
+ r'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*'
+ IntPattern1 = \
+ r'[\t\s]*\([\t\s]*'+PCD_PATTERN+r'[\t\s]+GE[\t\s]+\d+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
+ PCD_PATTERN+r'[\t\s]+LE[\t\s]+\d+[\t\s]*\)'
+ IntPattern1 = IntPattern1.replace(' ', '')
+ IntPattern2 = r'[\t\s]*'+PCD_PATTERN+r'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
+
+ HexPattern1 = \
+ r'[\t\s]*\([\t\s]*'+PCD_PATTERN+r'[\t\s]+GE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
+ PCD_PATTERN+r'[\t\s]+LE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)'
+ HexPattern1 = HexPattern1.replace(' ', '')
+ HexPattern2 = r'[\t\s]*'+PCD_PATTERN+r'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*'
+
+ #
+ # Do the Hex1 conversion
+ #
+ HexMatchedList = re.compile(HexPattern1).findall(ValidRangeExpr)
+ HexRangeDict = {}
+ for HexMatchedItem in HexMatchedList:
+ #
+ # To match items on both sides of '-'
+ #
+ RangeItemList = re.compile(r'[\t\s]*0[xX][0-9a-fA-F]+[\t\s]*').findall(HexMatchedItem)
+ if RangeItemList and len(RangeItemList) == 2:
+ HexRangeDict[HexMatchedItem] = RangeItemList
+
+ for Key in HexRangeDict.keys():
+ MaxItem = MinItem = ''
+ if int(HexRangeDict[Key][0], 16) > int(HexRangeDict[Key][1], 16):
+ MaxItem = HexRangeDict[Key][0]
+ MinItem = HexRangeDict[Key][1]
+ else:
+ MaxItem = HexRangeDict[Key][1]
+ MinItem = HexRangeDict[Key][0]
+
+ Range = ' %s - %s' % (MinItem.strip(), MaxItem.strip())
+ ValidRangeExpr = ValidRangeExpr.replace(Key, Range)
+ #
+ # Do the INT1 conversion
+ #
+ IntRangeDict = {}
+ IntMatchList = re.compile(IntPattern1).findall(ValidRangeExpr)
+ for MatchedItem in IntMatchList:
+ #
+ # To match items on both sides of '-'
+ #
+ RangeItemList = re.compile(r'[\t\s]*\d+[\t\s]*').findall(MatchedItem)
+ if RangeItemList and len(RangeItemList) == 2:
+ IntRangeDict[MatchedItem] = RangeItemList
+
+ for Key in IntRangeDict.keys():
+ MaxItem = MinItem = ''
+ if int(IntRangeDict[Key][0]) > int(IntRangeDict[Key][1]):
+ MaxItem = IntRangeDict[Key][0]
+ MinItem = IntRangeDict[Key][1]
+ else:
+ MaxItem = IntRangeDict[Key][1]
+ MinItem = IntRangeDict[Key][0]
+
+ Range = ' %s - %s' % (MinItem.strip(), MaxItem.strip())
+ ValidRangeExpr = ValidRangeExpr.replace(Key, Range)
+ #
+ # Do the HEX2 conversion
+ #
+ for MatchObj in re.compile(HexPattern2).finditer(ValidRangeExpr):
+ MatchStr = MatchObj.group()
+ Range = re.compile(PCD_PATTERN).sub(' ', MatchStr)
+ ValidRangeExpr = ValidRangeExpr.replace(MatchStr, Range)
+ #
+ # Do the INT2 conversion
+ #
+ for MatchObj in re.compile(IntPattern2).finditer(ValidRangeExpr):
+ MatchStr = MatchObj.group()
+ Range = re.compile(PCD_PATTERN).sub(' ', MatchStr)
+ ValidRangeExpr = ValidRangeExpr.replace(MatchStr, Range)
+
+ return ValidRangeExpr
+
+
+
+ def __str__(self):
+ return "ValidValueList = %s ValidValueListLang = %s ValidValueRange \
+ = %s Expression = %s ErrorNumber = %s %s" % \
+ (self.ValidValueList, self.ValidValueListLang, self.ValidValueRange, \
+ self.Expression, self.ErrorNumber, self.ErrorMessage)
+
+##
+# PcdEntryXml
+#
+class PcdEntryXml(object):
+ def __init__(self):
+ self.PcdItemType = ''
+ self.PcdUsage = ''
+ self.TokenSpaceGuidCName = ''
+ self.TokenSpaceGuidValue = ''
+ self.Token = ''
+ self.CName = ''
+ self.PcdCName = ''
+ self.DatumType = ''
+ self.ValidUsage = ''
+ self.DefaultValue = ''
+ self.MaxDatumSize = ''
+ self.Value = ''
+ self.Offset = ''
+ self.CommonDefines = CommonDefinesXml()
+ self.Prompt = []
+ self.HelpText = []
+ self.PcdError = []
+
+ ##
+ # AsBuilt will use FromXml
+ #
+ def FromXml(self, Item, Key):
+ self.PcdItemType = \
+ XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
+ self.PcdUsage = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdUsage')
+ self.TokenSpaceGuidCName = \
+ XmlElement(Item, '%s/TokenSpaceGuidCname' % Key)
+ self.TokenSpaceGuidValue = \
+ XmlElement(Item, '%s/TokenSpaceGuidValue' % Key)
+ self.Token = XmlElement(Item, '%s/Token' % Key)
+ self.CName = XmlElement(Item, '%s/CName' % Key)
+ self.PcdCName = XmlElement(Item, '%s/PcdCName' % Key)
+ self.DatumType = XmlElement(Item, '%s/DatumType' % Key)
+ self.ValidUsage = XmlElement(Item, '%s/ValidUsage' % Key)
+ if not GlobalData.gIS_BINARY_INF:
+ self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
+ else:
+ self.DefaultValue = XmlElement(Item, '%s/Value' % Key)
+ self.MaxDatumSize = XmlElement(Item, '%s/MaxDatumSize' % Key)
+ self.Value = XmlElement(Item, '%s/Value' % Key)
+ self.Offset = XmlElement(Item, '%s/Offset' % Key)
+ self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
+
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+ for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
+ PcdErrorObjXml = PcdErrorXml()
+ PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
+ self.PcdError.append(PcdErrorObj)
+
+ self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
+
+ PcdEntry = PcdObject()
+ PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
+ PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
+ PcdEntry.SetTokenSpaceGuidValue(self.TokenSpaceGuidValue)
+ PcdEntry.SetToken(self.Token)
+ PcdEntry.SetOffset(self.Offset)
+ PcdEntry.SetCName(self.CName)
+ PcdEntry.SetPcdCName(self.PcdCName)
+ PcdEntry.SetDatumType(self.DatumType)
+ PcdEntry.SetValidUsage(self.ValidUsage)
+ PcdEntry.SetDefaultValue(self.DefaultValue)
+ PcdEntry.SetMaxDatumSize(self.MaxDatumSize)
+ PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ PcdEntry.SetItemType(self.PcdItemType)
+
+ PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
+ PcdEntry.SetPcdErrorsList(self.PcdError)
+
+ return PcdEntry
+ ##
+ # Package will use FromXml2
+ #
+ def FromXml2(self, Item, Key):
+ self.TokenSpaceGuidCName = \
+ XmlElement(Item, '%s/TokenSpaceGuidCname' % Key)
+ self.Token = XmlElement(Item, '%s/Token' % Key)
+ self.CName = XmlElement(Item, '%s/CName' % Key)
+ self.DatumType = XmlElement(Item, '%s/DatumType' % Key)
+ self.ValidUsage = XmlElement(Item, '%s/ValidUsage' % Key)
+ self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
+ self.MaxDatumSize = XmlElement(Item, '%s/MaxDatumSize' % Key)
+ self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
+ for PromptItem in XmlList(Item, '%s/Prompt' % Key):
+ PromptObj = PromptXml()
+ PromptObj.FromXml(PromptItem, '%s/Prompt' % Key)
+ self.Prompt.append(PromptObj)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+ for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
+ PcdErrorObjXml = PcdErrorXml()
+ PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
+ self.PcdError.append(PcdErrorObj)
+
+ self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
+
+ PcdEntry = PcdObject()
+ PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
+ PcdEntry.SetSupModuleList(self.CommonDefines.SupModList)
+ PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
+ PcdEntry.SetToken(self.Token)
+ PcdEntry.SetCName(self.CName)
+ PcdEntry.SetDatumType(self.DatumType)
+ PcdEntry.SetValidUsage(self.ValidUsage)
+ PcdEntry.SetDefaultValue(self.DefaultValue)
+ PcdEntry.SetMaxDatumSize(self.MaxDatumSize)
+ PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+
+ PcdEntry.SetPromptList(GetPromptList(self.Prompt))
+ PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
+ PcdEntry.SetPcdErrorsList(self.PcdError)
+
+ return PcdEntry
+
+ ##
+ # Module will use FromXml3
+ #
+ def FromXml3(self, Item, Key):
+ self.PcdItemType = \
+ XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
+ self.PcdUsage = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdUsage')
+ self.TokenSpaceGuidCName = \
+ XmlElement(Item, '%s/TokenSpaceGuidCName' % Key)
+ self.CName = XmlElement(Item, '%s/CName' % Key)
+ self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
+ self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
+ for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
+ HelpTextObj = HelpTextXml()
+ HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
+ self.HelpText.append(HelpTextObj)
+ for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
+ PcdErrorObj = PcdErrorXml()
+ PcdErrorObj.FromXml(PcdErrorItem, 'PcdError')
+ self.PcdError.append(PcdErrorObj)
+
+ self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
+
+ PcdEntry = PcdObject()
+ PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
+ PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
+ PcdEntry.SetCName(self.CName)
+ PcdEntry.SetValidUsage(self.PcdUsage)
+ PcdEntry.SetDefaultValue(self.DefaultValue)
+ PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
+ PcdEntry.SetItemType(self.PcdItemType)
+
+ PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
+ PcdEntry.SetPcdErrorsList(self.PcdError)
+
+ return PcdEntry
+
+ def ToXml(self, PcdEntry, Key):
+ if self.PcdCName:
+ pass
+
+ DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
+
+ AttributeList = \
+ [['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
+ ['PcdUsage', PcdEntry.GetValidUsage()], \
+ ['PcdItemType', PcdEntry.GetItemType()], \
+ ['FeatureFlag', PcdEntry.GetFeatureFlag()],
+ ]
+ NodeList = [['TokenSpaceGuidCname', PcdEntry.GetTokenSpaceGuidCName()],
+ ['TokenSpaceGuidValue', PcdEntry.GetTokenSpaceGuidValue()],
+ ['Token', PcdEntry.GetToken()],
+ ['CName', PcdEntry.GetCName()],
+ ['DatumType', PcdEntry.GetDatumType()],
+ ['ValidUsage', GetStringOfList(PcdEntry.GetValidUsage())],
+ ['DefaultValue', DefaultValue],
+ ['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
+ ['Offset', PcdEntry.GetOffset()],
+ ]
+
+ for Item in PcdEntry.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ for Item in PcdEntry.GetPcdErrorsList():
+ Tmp = PcdErrorXml()
+ NodeList.append(Tmp.ToXml(Item, 'PcdError'))
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+ ##
+ # Package will use ToXml2
+ #
+ def ToXml2(self, PcdEntry, Key):
+ if self.PcdCName:
+ pass
+
+ DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
+
+ AttributeList = \
+ [['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
+ ['SupModList', GetStringOfList(PcdEntry.GetSupModuleList())]
+ ]
+ NodeList = [['TokenSpaceGuidCname', PcdEntry.GetTokenSpaceGuidCName()],
+ ['Token', PcdEntry.GetToken()],
+ ['CName', PcdEntry.GetCName()],
+ ['DatumType', PcdEntry.GetDatumType()],
+ ['ValidUsage', GetStringOfList(PcdEntry.GetValidUsage())],
+ ['DefaultValue', DefaultValue],
+ ['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
+ ]
+ for Item in PcdEntry.GetPromptList():
+ Tmp = PromptXml()
+ NodeList.append(Tmp.ToXml(Item))
+
+ for Item in PcdEntry.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+
+ for Item in PcdEntry.GetPcdErrorsList():
+ Tmp = PcdErrorXml()
+ NodeList.append(Tmp.ToXml(Item, 'PcdError'))
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+ ##
+ # Module will use ToXml3
+ #
+ def ToXml3(self, PcdEntry, Key):
+ if self.PcdCName:
+ pass
+
+ DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
+
+ AttributeList = \
+ [['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
+ ['PcdUsage', PcdEntry.GetValidUsage()], \
+ ['PcdItemType', PcdEntry.GetItemType()], \
+ ['FeatureFlag', ConvertNEToNOTEQ(PcdEntry.GetFeatureFlag())],
+ ]
+ NodeList = [['CName', PcdEntry.GetCName()],
+ ['TokenSpaceGuidCName', PcdEntry.GetTokenSpaceGuidCName()],
+ ['DefaultValue', DefaultValue],
+ ]
+
+ for Item in PcdEntry.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ for Item in PcdEntry.GetPcdErrorsList():
+ Tmp = PcdErrorXml()
+ NodeList.append(Tmp.ToXml(Item, 'PcdError'))
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+ ##
+ # AsBuild Module will use ToXml4
+ #
+ def ToXml4(self, PcdEntry, Key):
+ if self.PcdCName:
+ pass
+
+ DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
+
+ AttributeList = []
+
+ NodeList = [
+ ['TokenSpaceGuidValue', PcdEntry.GetTokenSpaceGuidValue()],
+ ['PcdCName', PcdEntry.GetCName()],
+ ['Token', PcdEntry.GetToken()],
+ ['DatumType', PcdEntry.GetDatumType()],
+ ['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
+ ['Value', DefaultValue],
+ ['Offset', PcdEntry.GetOffset()]
+ ]
+
+ for Item in PcdEntry.GetHelpTextList():
+ Tmp = HelpTextXml()
+ NodeList.append(Tmp.ToXml(Item))
+ for Item in PcdEntry.GetPcdErrorsList():
+ Tmp = PcdErrorXml()
+ NodeList.append(Tmp.ToXml(Item, 'PcdError'))
+
+ Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
+
+ return Root
+
+
+ def __str__(self):
+ Str = \
+ ('PcdItemType = %s PcdUsage = %s TokenSpaceGuidCName = %s \
+ TokenSpaceGuidValue = %s Token = %s CName = %s PcdCName = %s \
+ DatumType = %s ValidUsage = %s DefaultValue = %s MaxDatumSize = %s \
+ Value = %s Offset = %s %s') % \
+ (self.PcdItemType, self.PcdUsage, self.TokenSpaceGuidCName, \
+ self.TokenSpaceGuidValue, self.Token, self.CName, self.PcdCName, \
+ self.DatumType, self.ValidUsage, self.DefaultValue, \
+ self.MaxDatumSize, self.Value, self.Offset, self.CommonDefines)
+ for Item in self.HelpText:
+ Str = Str + "\n\t" + str(Item)
+ for Item in self.PcdError:
+ Str = Str + "\n\tPcdError:" + str(Item)
+ return Str
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParser.py
new file mode 100755
index 00000000..a252bf52
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParser.py
@@ -0,0 +1,926 @@
+## @file
+# This file is used to parse an XML file of a .PKG file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+XmlParser
+'''
+
+##
+# Import Modules
+#
+import re
+
+from Library.Xml.XmlRoutines import XmlNode
+from Library.Xml.XmlRoutines import CreateXmlElement
+from Library.Xml.XmlRoutines import XmlList
+from Library.Xml.XmlRoutines import XmlParseFile
+from Core.DistributionPackageClass import DistributionPackageClass
+from Object.POM.ModuleObject import DepexObject
+from Library.ParserValidate import IsValidInfMoudleType
+from Library.ParserValidate import IsValidInstallPath
+from Library.Misc import IsEqualList
+from Library.Misc import Sdict
+
+from Logger.StringTable import ERR_XML_INVALID_VARIABLENAME
+from Logger.StringTable import ERR_XML_INVALID_LIB_SUPMODLIST
+from Logger.StringTable import ERR_XML_INVALID_EXTERN_SUPARCHLIST
+from Logger.StringTable import ERR_XML_INVALID_EXTERN_SUPMODLIST
+from Logger.StringTable import ERR_XML_INVALID_EXTERN_SUPMODLIST_NOT_LIB
+from Logger.StringTable import ERR_FILE_NAME_INVALIDE
+from Logger.ToolError import PARSER_ERROR
+from Logger.ToolError import FORMAT_INVALID
+
+from Xml.CommonXml import DistributionPackageHeaderXml
+from Xml.CommonXml import MiscellaneousFileXml
+from Xml.CommonXml import UserExtensionsXml
+from Xml.XmlParserMisc import ConvertVariableName
+from Xml.XmlParserMisc import IsRequiredItemListNull
+from Xml.ModuleSurfaceAreaXml import ModuleSurfaceAreaXml
+from Xml.PackageSurfaceAreaXml import PackageSurfaceAreaXml
+
+import Logger.Log as Logger
+
+##
+# DistributionPackageXml
+#
+class DistributionPackageXml(object):
+ def __init__(self):
+ self.DistP = DistributionPackageClass()
+ self.Pkg = ''
+
+ ## ValidateDistributionPackage
+ #
+ # Check if any required item is missing in DistributionPackage
+ #
+ def ValidateDistributionPackage(self):
+ XmlTreeLevel = ['DistributionPackage']
+ if self.DistP:
+ #
+ # Check DistributionPackage -> DistributionHeader
+ #
+ XmlTreeLevel = ['DistributionPackage', '']
+ CheckDict = {'DistributionHeader':self.DistP.Header }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ if self.DistP.Header:
+ DpHeader = self.DistP.Header
+ XmlTreeLevel = ['DistributionPackage', 'DistributionHeader']
+ CheckDict = Sdict()
+ if DpHeader.GetAbstract():
+ DPAbstract = DpHeader.GetAbstract()[0][1]
+ else:
+ DPAbstract = ''
+ if DpHeader.GetCopyright():
+ DPCopyright = DpHeader.GetCopyright()[0][1]
+ else:
+ DPCopyright = ''
+ if DpHeader.GetLicense():
+ DPLicense = DpHeader.GetLicense()[0][1]
+ else:
+ DPLicense = ''
+
+ CheckDict['Name'] = DpHeader.GetName()
+ CheckDict['GUID'] = DpHeader.GetGuid()
+ CheckDict['Version'] = DpHeader.GetVersion()
+ CheckDict['Copyright'] = DPCopyright
+ CheckDict['License'] = DPLicense
+ CheckDict['Abstract'] = DPAbstract
+ CheckDict['Vendor'] = DpHeader.GetVendor()
+ CheckDict['Date'] = DpHeader.GetDate()
+ CheckDict['XmlSpecification'] = DpHeader.GetXmlSpecification()
+
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ else:
+ XmlTreeLevel = ['DistributionPackage', 'DistributionHeader']
+ CheckDict = {'DistributionHeader': '', }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check Each Package
+ #
+ for Key in self.DistP.PackageSurfaceArea:
+ ValidatePackageSurfaceArea(self.DistP.PackageSurfaceArea[Key])
+
+ #
+ # Check Each Module
+ #
+ for Key in self.DistP.ModuleSurfaceArea:
+ ValidateMS(self.DistP.ModuleSurfaceArea[Key], ['DistributionPackage', 'ModuleSurfaceArea'])
+
+ #
+ # Check Each Tool
+ #
+ if self.DistP.Tools:
+ XmlTreeLevel = ['DistributionPackage', 'Tools', 'Header']
+ CheckDict = {'Name': self.DistP.Tools.GetName(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ if not self.DistP.Tools.GetFileList():
+ XmlTreeLevel = ['DistributionPackage', 'Tools']
+ CheckDict = {'FileName': None, }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ for Item in self.DistP.Tools.GetFileList():
+ XmlTreeLevel = ['DistributionPackage', 'Tools']
+ CheckDict = {'FileName': Item.GetURI(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check Each Misc File
+ #
+ if self.DistP.MiscellaneousFiles:
+ XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles', 'Header']
+ CheckDict = {'Name': self.DistP.MiscellaneousFiles.GetName(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ if not self.DistP.MiscellaneousFiles.GetFileList():
+ XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles']
+ CheckDict = {'FileName': None, }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ for Item in self.DistP.MiscellaneousFiles.GetFileList():
+ XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles']
+ CheckDict = {'FileName': Item.GetURI(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check Each Distribution Level User Extension
+ #
+ for Item in self.DistP.UserExtensions:
+ XmlTreeLevel = ['DistributionPackage', 'UserExtensions']
+ CheckDict = {'UserId': Item.GetUserID(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+
+ def FromXml(self, Filename=None):
+ if Filename is not None:
+ self.DistP = DistributionPackageClass()
+ #
+ # Load the XML file
+ #
+ self.Pkg = XmlParseFile(Filename)
+
+ #
+ # Parse Header information
+ #
+ Tmp = DistributionPackageHeaderXml()
+ DistributionPackageHeader = \
+ Tmp.FromXml(XmlNode(self.Pkg, '/DistributionPackage/DistributionHeader'), 'DistributionHeader')
+ self.DistP.Header = DistributionPackageHeader
+ #
+ # Parse each PackageSurfaceArea
+ #
+ for Item in XmlList(self.Pkg, '/DistributionPackage/PackageSurfaceArea'):
+ Psa = PackageSurfaceAreaXml()
+ Package = Psa.FromXml(Item, 'PackageSurfaceArea')
+ self.DistP.PackageSurfaceArea[(Package.GetGuid(), \
+ Package.GetVersion(), \
+ Package.GetPackagePath())] = \
+ Package
+ #
+ # Parse each ModuleSurfaceArea
+ #
+ for Item in XmlList(self.Pkg, '/DistributionPackage/ModuleSurfaceArea'):
+ Msa = ModuleSurfaceAreaXml()
+ Module = Msa.FromXml(Item, 'ModuleSurfaceArea', True)
+ ModuleKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())
+ self.DistP.ModuleSurfaceArea[ModuleKey] = Module
+
+ #
+ # Parse Tools
+ #
+ Tmp = MiscellaneousFileXml()
+ self.DistP.Tools = Tmp.FromXml2(XmlNode(self.Pkg, '/DistributionPackage/Tools'), 'Tools')
+
+ #
+ # Parse MiscFiles
+ #
+ Tmp = MiscellaneousFileXml()
+ self.DistP.MiscellaneousFiles = \
+ Tmp.FromXml2(XmlNode(self.Pkg, \
+ '/DistributionPackage/MiscellaneousFiles'), \
+ 'MiscellaneousFiles')
+
+ #
+ # Parse UserExtensions
+ #
+ for Item in XmlList(self.Pkg, '/DistributionPackage/UserExtensions'):
+ Tmp = UserExtensionsXml()
+ self.DistP.UserExtensions.append(Tmp.FromXml2(Item, 'UserExtensions'))
+
+ #
+ # Check Required Items for XML
+ #
+ self.ValidateDistributionPackage()
+
+ return self.DistP
+
+ def ToXml(self, DistP):
+ if self.DistP:
+ pass
+ if DistP is not None:
+ #
+ # Generate DistributionPackageHeader
+ #
+ Attrs = [['xmlns', 'http://www.uefi.org/2011/1.1'],
+ ['xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'],
+ ]
+ Root = CreateXmlElement('DistributionPackage', '', [], Attrs)
+
+ Tmp = DistributionPackageHeaderXml()
+ Root.appendChild(Tmp.ToXml(DistP.Header, 'DistributionHeader'))
+ #
+ # Generate each PackageSurfaceArea
+ #
+ for Package in DistP.PackageSurfaceArea.values():
+ Psa = PackageSurfaceAreaXml()
+ DomPackage = Psa.ToXml(Package)
+ Root.appendChild(DomPackage)
+ #
+ # Generate each ModuleSurfaceArea
+ #
+ for Module in DistP.ModuleSurfaceArea.values():
+ Msa = ModuleSurfaceAreaXml()
+ DomModule = Msa.ToXml(Module)
+ Root.appendChild(DomModule)
+ #
+ # Generate Tools
+ #
+ Tmp = MiscellaneousFileXml()
+ ToolNode = Tmp.ToXml2(DistP.Tools, 'Tools')
+ if ToolNode is not None:
+ Root.appendChild(ToolNode)
+ #
+ # Generate MiscFiles
+ #
+ Tmp = MiscellaneousFileXml()
+ MiscFileNode = Tmp.ToXml2(DistP.MiscellaneousFiles,
+ 'MiscellaneousFiles')
+ if MiscFileNode is not None:
+ Root.appendChild(MiscFileNode)
+
+ XmlContent = Root.toprettyxml(indent=' ')
+
+
+ #
+ # Remove empty element
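+ # (this drops attribute-free self-closing tags, e.g. an empty
+ # <MaxDatumSize/>; tags carrying attributes contain '=' and are
+ # excluded by the [^<>=] character class)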
+ #
+ XmlContent = re.sub(r'[\s\r\n]*<[^<>=]*/>', '', XmlContent)
+
+ #
+ # Remove empty help text element
+ #
+ XmlContent = re.sub(r'[\s\r\n]*<HelpText Lang="en-US"/>', '',
+ XmlContent)
+
+ #
+ # Remove SupArchList="COMMON" or "common"
+ #
+ XmlContent = \
+ re.sub(r'[\s\r\n]*SupArchList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
+ r'[\s\r\n]*"', '', XmlContent)
+ XmlContent = \
+ re.sub(r'[\s\r\n]*SupArchList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*common'
+ r'[\s\r\n]*"', '', XmlContent)
+ #
+ # Remove <SupArchList> COMMON </SupArchList>
+ #
+ XmlContent = \
+ re.sub(r'[\s\r\n]*<SupArchList>[\s\r\n]*COMMON[\s\r\n]*'
+ r'</SupArchList>[\s\r\n]*', '', XmlContent)
+
+ #
+ # Remove <SupArchList> common </SupArchList>
+ #
+ XmlContent = \
+ re.sub(r'[\s\r\n]*<SupArchList>[\s\r\n]*'
+ r'common[\s\r\n]*</SupArchList>[\s\r\n]*', '', XmlContent)
+
+ #
+ # Remove SupModList="COMMON" or "common"
+ #
+ XmlContent = \
+ re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
+ r'[\s\r\n]*"', '', XmlContent)
+ XmlContent = \
+ re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*common'
+ r'[\s\r\n]*"', '', XmlContent)
+
+ return XmlContent
+
+ return ''
+
+## ValidateMS
+#
+# Check if any required item is missing in ModuleSurfaceArea
+#
+# @param Module: The ModuleSurfaceArea to be checked
+# @param TopXmlTreeLevel: The top level of the XML tree used in error messages
+#
+def ValidateMS(Module, TopXmlTreeLevel):
+ ValidateMS1(Module, TopXmlTreeLevel)
+ ValidateMS2(Module, TopXmlTreeLevel)
+ ValidateMS3(Module, TopXmlTreeLevel)
+
+## ValidateMS1
+#
+# Check if any required item is missing in ModuleSurfaceArea
+#
+# @param Module: The ModuleSurfaceArea to be checked
+# @param TopXmlTreeLevel: The top level of the XML tree used in error messages
+#
+def ValidateMS1(Module, TopXmlTreeLevel):
+ #
+ # Check Guids -> GuidCName
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['Guids']
+ for Item in Module.GetGuidList():
+ if Item is None:
+ CheckDict = {'GuidCName':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['Guids', 'GuidCName']
+ for Item in Module.GetGuidList():
+ CheckDict = {'CName':Item.GetCName(),
+ 'GuidType':Item.GetGuidTypeList(),
+ 'Usage':Item.GetUsage()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ if Item.GetVariableName():
+ Result = ConvertVariableName(Item.GetVariableName())
+ if Result is None:
+ Msg = "->".join(Node for Node in XmlTreeLevel)
+ ErrorMsg = ERR_XML_INVALID_VARIABLENAME % (Item.GetVariableName(), Item.GetCName(), Msg)
+ Logger.Error('\nUPT', PARSER_ERROR, ErrorMsg, RaiseError=True)
+ else:
+ Item.SetVariableName(Result)
+
+ #
+ # Check Protocols -> Protocol
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['Protocols']
+ for Item in Module.GetProtocolList():
+ if Item is None:
+ CheckDict = {'Protocol':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['Protocols', 'Protocol']
+ for Item in Module.GetProtocolList():
+ CheckDict = {'CName':Item.GetCName(),
+ 'Usage':Item.GetUsage()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check PPIs -> Ppi
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['PPIs']
+ for Item in Module.GetPpiList():
+ if Item is None:
+ CheckDict = {'Ppi':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['PPIs', 'Ppi']
+ for Item in Module.GetPpiList():
+ CheckDict = {'CName':Item.GetCName(),
+ 'Usage':Item.GetUsage()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check PcdCoded -> Entry
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['PcdCoded']
+ for Item in Module.GetPcdList():
+ if Item is None:
+ CheckDict = {'PcdEntry':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['PcdCoded', 'PcdEntry']
+ for Item in Module.GetPcdList():
+ CheckDict = {'TokenSpaceGuidCname':Item.GetTokenSpaceGuidCName(),
+ 'CName':Item.GetCName(),
+ 'PcdUsage':Item.GetValidUsage(),
+ 'PcdItemType':Item.GetItemType()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check Externs -> Extern
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['Externs']
+ for Item in Module.GetExternList():
+ if Item is None:
+ CheckDict = {'Extern':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # If SupArchList is used to identify different EntryPoint, UnloadImage, Constructor/Destructor elements and
+ # that SupArchList does not match ModuleSurfaceArea.ModuleProperties:SupArchList, the tool must exit gracefully,
+ # informing the user that the EDK II Build system does not support different EntryPoint, UnloadImage,
+ # Constructor or Destructor elements based on Architecture type. Two SupArchList attributes are considered
+ # identical if it lists the same CPU architectures in any order.
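+ # For example, SupArchList="IA32 X64" and SupArchList="X64 IA32" list the
+ # same architectures and are therefore treated as identical.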
+ #
+ for Item in Module.GetExternList():
+ if len(Item.SupArchList) > 0:
+ if not IsEqualList(Item.SupArchList, Module.SupArchList):
+ Logger.Error('\nUPT',
+ PARSER_ERROR,
+ ERR_XML_INVALID_EXTERN_SUPARCHLIST % (str(Item.SupArchList), str(Module.SupArchList)),
+ RaiseError=True)
+
+ #
+ # Check DistributionPackage -> ModuleSurfaceArea -> UserExtensions
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['UserExtensions']
+ for Item in Module.GetUserExtensionList():
+ CheckDict = {'UserId':Item.GetUserID(), 'Identifier':Item.GetIdentifier()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> MiscellaneousFiles -> Filename
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['MiscellaneousFiles']
+ for Item in Module.GetMiscFileList():
+ if not Item.GetFileList():
+ CheckDict = {'Filename': '', }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ for File in Item.GetFileList():
+ CheckDict = {'Filename': File.GetURI(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+## ValidateMS2
+#
+# Check if any required item is missing in ModuleSurfaceArea
+#
+# @param Module: The ModuleSurfaceArea to be checked
+# @param TopXmlTreeLevel: The top level of the XML tree used in error messages
+#
+def ValidateMS2(Module, TopXmlTreeLevel):
+ #
+ # Check Header
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['Header']
+ CheckDict = Sdict()
+ CheckDict['Name'] = Module.GetName()
+ CheckDict['BaseName'] = Module.GetBaseName()
+ CheckDict['GUID'] = Module.GetGuid()
+ CheckDict['Version'] = Module.GetVersion()
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check ModuleProperties
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties']
+ CheckDict = {'ModuleType':Module.GetModuleType(),
+ 'Path':Module.GetModulePath()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ if not IsValidInstallPath(Module.GetModulePath()):
+ Logger.Error("UPT", FORMAT_INVALID, ERR_FILE_NAME_INVALIDE % Module.GetModulePath())
+
+ #
+ # Check ModuleProperties->BootMode
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['BootMode']
+ for Item in Module.GetBootModeList():
+ CheckDict = {'Usage':Item.GetUsage(),
+ 'SupportedBootModes':Item.GetSupportedBootModes()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check ModuleProperties->Event
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['Event']
+ for Item in Module.GetEventList():
+ CheckDict = {'Usage':Item.GetUsage(),
+ 'EventType':Item.GetEventType()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check ModuleProperties->Hob
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['HOB']
+ for Item in Module.GetHobList():
+ CheckDict = {'Usage':Item.GetUsage(),
+ 'HobType':Item.GetHobType()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # The UDP Specification supports the module type of UEFI_RUNTIME_DRIVER, which is not present in the EDK II INF
+ # File Specification v. 1.23, so UPT must perform the following translation, which includes the generation of a
+ # [Depex] section.
+ #
+ if Module.ModuleType == "UEFI_RUNTIME_DRIVER":
+ Module.ModuleType = "DXE_RUNTIME_DRIVER"
+ DxeObj = DepexObject()
+ DxeObj.SetDepex("gEfiBdsArchProtocolGuid AND \ngEfiCpuArchProtocolGuid AND\n" + \
+ "gEfiMetronomeArchProtocolGuid AND \ngEfiMonotonicCounterArchProtocolGuid AND\n" + \
+ "gEfiRealTimeClockArchProtocolGuid AND \ngEfiResetArchProtocolGuid AND\n" + \
+ "gEfiRuntimeArchProtocolGuid AND \ngEfiSecurityArchProtocolGuid AND\n" + \
+ "gEfiTimerArchProtocolGuid AND \ngEfiVariableWriteArchProtocolGuid AND\n" + \
+ "gEfiVariableArchProtocolGuid AND \ngEfiWatchdogTimerArchProtocolGuid")
+ DxeObj.SetModuleType(['DXE_RUNTIME_DRIVER'])
+ Module.PeiDepex = []
+ Module.DxeDepex = []
+ Module.SmmDepex = []
+ Module.DxeDepex.append(DxeObj)
+
+ #
+ # Check LibraryClassDefinitions -> LibraryClass
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['LibraryClassDefinitions']
+ for Item in Module.GetLibraryClassList():
+ if Item is None:
+ CheckDict = {'LibraryClass':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['LibraryClassDefinitions', 'LibraryClass']
+
+ IsLibraryModule = False
+ LibrarySupModList = []
+ for Item in Module.GetLibraryClassList():
+ CheckDict = {'Keyword':Item.GetLibraryClass(),
+ 'Usage':Item.GetUsage()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ #
+ # If the LibraryClass:SupModList is not "UNDEFINED" the LIBRARY_CLASS entry must have the list
+ # appended using the format:
+ # LIBRARY_CLASS = <ClassName> ["|" <Edk2ModuleTypeList>]
+ #
+ # Edk2ModuleTypeList ::= <ModuleType> [" " <ModuleType>]{0,}
+ # <ModuleTypes> ::= {"BASE"} {"SEC"} {"PEI_CORE"} {"PEIM"}
+ # {"DXE_CORE"} {"DXE_DRIVER"} {"SMM_CORE"}
+ # {"DXE_SMM_DRIVER"} {"DXE_RUNTIME_DRIVER"}
+ # {"DXE_SAL_DRIVER"} {"UEFI_DRIVER"}
+ # {"UEFI_APPLICATION"} {"USER_DEFINED"}
+ #
+ if len(Item.SupModuleList) > 0:
+ for SupModule in Item.SupModuleList:
+ if not IsValidInfMoudleType(SupModule):
+ Logger.Error('\nUPT',
+ PARSER_ERROR,
+ ERR_XML_INVALID_LIB_SUPMODLIST % (Item.LibraryClass, str(SupModule)),
+ RaiseError=True)
+
+ if Item.Usage == 'PRODUCES' or Item.Usage == 'SOMETIMES_PRODUCES':
+ IsLibraryModule = True
+ LibrarySupModList = Item.SupModuleList
+
+
+ #
+ # For Library modules (indicated by a LIBRARY_CLASS statement in the [Defines] section)
+ # If the SupModList attribute of the CONSTRUCTOR or DESTRUCTOR element does not match the Supported Module
+ # Types listed after "LIBRARY_CLASS = <Keyword> |", the tool should gracefully exit with an error message
+ # stating that there is a conflict in the module types the CONSTRUCTOR/DESTRUCTOR is to be used with and
+ # the Module types this Library supports.
+ #
+ if IsLibraryModule:
+ for Item in Module.GetExternList():
+ if Item.Constructor or Item.Destructor:
+ if hasattr(Item, 'SupModList') and len(Item.SupModList) > 0 and \
+ not IsEqualList(Item.SupModList, LibrarySupModList):
+ Logger.Error('\nUPT',
+ PARSER_ERROR,
+ ERR_XML_INVALID_EXTERN_SUPMODLIST % (str(Item.SupModList), str(LibrarySupModList)),
+ RaiseError=True)
+
+ #
+ # If the module is not a library module, the MODULE_TYPE listed in the ModuleSurfaceArea.Header must match the
+ # SupModList attribute. If these conditions cannot be met, the tool must exit gracefully, informing the user
+ # that the EDK II Build system does not currently support the features required by this Module.
+ #
+ if not IsLibraryModule:
+ for Item in Module.GetExternList():
+ if hasattr(Item, 'SupModList') and len(Item.SupModList) > 0 and \
+ not IsEqualList(Item.SupModList, [Module.ModuleType]):
+ Logger.Error('\nUPT',
+ PARSER_ERROR,
+ ERR_XML_INVALID_EXTERN_SUPMODLIST_NOT_LIB % (str(Module.ModuleType), str(Item.SupModList)),
+ RaiseError=True)
+ #
+ # Check SourceFiles
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['SourceFiles']
+ for Item in Module.GetSourceFileList():
+ if Item is None:
+ CheckDict = {'Filename':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['SourceFiles']
+ for Item in Module.GetSourceFileList():
+ CheckDict = {'Filename':Item.GetSourceFile()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ for ItemCount in range(len(Module.GetBinaryFileList())):
+ Item = Module.GetBinaryFileList()[ItemCount]
+ if Item and len(Item.FileNamList) > 0 and Item.FileNamList[0].FileType == 'FREEFORM':
+ Item.FileNamList[0].FileType = 'SUBTYPE_GUID'
+ Module.GetBinaryFileList()[ItemCount] = Item
+
+## ValidateMS3
+#
+# Check if any required item is missing in ModuleSurfaceArea
+#
+# @param Module: The ModuleSurfaceArea to be checked
+# @param TopXmlTreeLevel: The top level of the XML tree used in error messages
+#
+def ValidateMS3(Module, TopXmlTreeLevel):
+ #
+ # Check PackageDependencies -> Package
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['PackageDependencies']
+ for Item in Module.GetPackageDependencyList():
+ if Item is None:
+ CheckDict = {'Package':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['PackageDependencies', 'Package']
+ for Item in Module.GetPackageDependencyList():
+ CheckDict = {'GUID':Item.GetGuid()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check BinaryFiles -> BinaryFile
+ #
+ for Item in Module.GetBinaryFileList():
+ if Item is None:
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles']
+ CheckDict = {'BinaryFile':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ if not Item.GetFileNameList():
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile']
+ CheckDict = {'Filename':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile']
+ for File in Item.GetFileNameList():
+ CheckDict = {'Filename':File.GetFilename(),
+ 'FileType':File.GetFileType()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ for AsBuilt in Item.GetAsBuiltList():
+ #
+ # Check LibInstance
+ #
+ if len(AsBuilt.LibraryInstancesList) == 1 and not AsBuilt.LibraryInstancesList[0]:
+ CheckDict = {'GUID':''}
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'LibraryInstances']
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ for LibItem in AsBuilt.LibraryInstancesList:
+ CheckDict = {'Guid':LibItem.Guid,
+ 'Version':LibItem.Version}
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'LibraryInstances']
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check PatchPcd
+ #
+ for PatchPcdItem in AsBuilt.PatchPcdList:
+ CheckDict = {'TokenSpaceGuidValue':PatchPcdItem.TokenSpaceGuidValue,
+ 'PcdCName':PatchPcdItem.PcdCName,
+ 'Token':PatchPcdItem.Token,
+ 'DatumType':PatchPcdItem.DatumType,
+ 'Value':PatchPcdItem.DefaultValue,
+ 'Offset':PatchPcdItem.Offset}
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'PatchPcdValue']
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ #
+ # Check PcdError
+ #
+ for PcdErrorItem in PatchPcdItem.PcdErrorsList:
+ CheckDict = {'ErrorNumber':PcdErrorItem.ErrorNumber}
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt',
+ 'PatchPcdValue', 'PcdError']
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ #
+ # Check PcdEx
+ #
+ for PcdExItem in AsBuilt.PcdExValueList:
+ CheckDict = {'TokenSpaceGuidValue':PcdExItem.TokenSpaceGuidValue,
+ 'Token':PcdExItem.Token,
+ 'DatumType':PcdExItem.DatumType}
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'PcdExValue']
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ #
+ # Check PcdError
+ #
+ for PcdErrorItem in PcdExItem.PcdErrorsList:
+ CheckDict = {'ErrorNumber':PcdErrorItem.ErrorNumber}
+ XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt',
+ 'PcdExValue', 'PcdError']
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ #
+ # Check SmmDepex
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['SmmDepex']
+ for Item in Module.GetSmmDepex():
+ CheckDict = {'Expression':Item.GetDepex()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check PeiDepex
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['PeiDepex']
+ for Item in Module.GetPeiDepex():
+ CheckDict = {'Expression':Item.GetDepex()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DxeDepex
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['DxeDepex']
+ for Item in Module.GetDxeDepex():
+ CheckDict = {'Expression':Item.GetDepex()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check <UserExtensions>
+ #
+ XmlTreeLevel = TopXmlTreeLevel + ['UserExtensions']
+ for Item in Module.GetUserExtensionList():
+ CheckDict = {'UserId':Item.GetUserID(), 'Identifier':Item.GetIdentifier()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+## ValidatePS1
+#
+# Check if any required item is missing in the Header, ClonedFrom,
+# LibraryClassDeclarations, IndustryStandardIncludes and PackageIncludes
+# sections of PackageSurfaceArea
+#
+# @param Package: The PackageSurfaceArea to be checked
+#
+def ValidatePS1(Package):
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> Header
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'Header']
+ CheckDict = Sdict()
+ CheckDict['Name'] = Package.GetName()
+ CheckDict['BaseName'] = Package.GetBaseName()
+ CheckDict['GUID'] = Package.GetGuid()
+ CheckDict['Version'] = Package.GetVersion()
+ CheckDict['PackagePath'] = Package.GetPackagePath()
+
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ if not IsValidInstallPath(Package.GetPackagePath()):
+ Logger.Error("UPT", FORMAT_INVALID, ERR_FILE_NAME_INVALIDE % Package.GetPackagePath())
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> ClonedFrom
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ClonedFrom']
+ for Item in Package.GetClonedFromList():
+ if Item is None:
+ CheckDict = Sdict()
+ CheckDict['GUID'] = ''
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ CheckDict = Sdict()
+ CheckDict['GUID'] = Item.GetPackageGuid()
+ CheckDict['Version'] = Item.GetPackageVersion()
+
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> LibraryClassDeclarations -> LibraryClass
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'LibraryClassDeclarations']
+ for Item in Package.GetLibraryClassList():
+ if Item is None:
+ CheckDict = {'LibraryClass':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'LibraryClassDeclarations', 'LibraryClass']
+ for Item in Package.GetLibraryClassList():
+ CheckDict = {'Keyword':Item.GetLibraryClass(),
+ 'HeaderFile':Item.GetIncludeHeader()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> IndustryStandardIncludes -> IndustryStandardHeader
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'IndustryStandardIncludes']
+ for Item in Package.GetStandardIncludeFileList():
+ if Item is None:
+ CheckDict = {'IndustryStandardHeader':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'IndustryStandardIncludes', 'IndustryStandardHeader']
+ for Item in Package.GetStandardIncludeFileList():
+ CheckDict = {'HeaderFile':Item.GetFilePath()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> PackageIncludes -> PackageHeader
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PackageIncludes']
+ for Item in Package.GetPackageIncludeFileList():
+ if Item is None:
+ CheckDict = {'PackageHeader':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PackageIncludes', 'PackageHeader']
+ for Item in Package.GetPackageIncludeFileList():
+ CheckDict = {'HeaderFile':Item.GetFilePath()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+## ValidatePS2
+#
+# Check if any required item is missing in the Modules, GUID/Protocol/PPI/PCD
+# declarations, UserExtensions and MiscellaneousFiles sections of
+# PackageSurfaceArea
+#
+# @param Package: The PackageSurfaceArea to be checked
+#
+def ValidatePS2(Package):
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> Modules -> ModuleSurfaceArea
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'Modules', 'ModuleSurfaceArea']
+ for Item in Package.GetModuleDict().values():
+ ValidateMS(Item, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> GuidDeclarations Entry
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'GuidDeclarations']
+ for Item in Package.GetGuidList():
+ if Item is None:
+ CheckDict = {'Entry':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'GuidDeclarations', 'Entry']
+ for Item in Package.GetGuidList():
+ CheckDict = {'CName':Item.GetCName(),
+ 'GuidValue':Item.GetGuid()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> ProtocolDeclarations -> Entry
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ProtocolDeclarations']
+ for Item in Package.GetProtocolList():
+ if Item is None:
+ CheckDict = {'Entry':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ProtocolDeclarations', 'Entry']
+ for Item in Package.GetProtocolList():
+ CheckDict = {'CName':Item.GetCName(),
+ 'GuidValue':Item.GetGuid()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> PpiDeclarations -> Entry
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PpiDeclarations']
+ for Item in Package.GetPpiList():
+ if Item is None:
+ CheckDict = {'Entry':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PpiDeclarations', 'Entry']
+ for Item in Package.GetPpiList():
+ CheckDict = {'CName':Item.GetCName(),
+ 'GuidValue':Item.GetGuid()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> PcdDeclarations -> Entry
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PcdDeclarations']
+ for Item in Package.GetPcdList():
+ if Item is None:
+ CheckDict = {'PcdEntry':''}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PcdDeclarations', 'PcdEntry']
+ for Item in Package.GetPcdList():
+ CheckDict = {'TokenSpaceGuidCname':Item.GetTokenSpaceGuidCName(),
+ 'Token':Item.GetToken(),
+ 'CName':Item.GetCName(),
+ 'DatumType':Item.GetDatumType(),
+ 'ValidUsage':Item.GetValidUsage(),
+ 'DefaultValue':Item.GetDefaultValue()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> UserExtensions
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'UserExtensions']
+ for Item in Package.GetUserExtensionList():
+ CheckDict = {'UserId':Item.GetUserID(), 'Identifier':Item.GetIdentifier()}
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+ #
+ # Check DistributionPackage -> PackageSurfaceArea -> MiscellaneousFiles -> Filename
+ #
+ XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'MiscellaneousFiles']
+ for Item in Package.GetMiscFileList():
+ if not Item.GetFileList():
+ CheckDict = {'Filename': '', }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+ for File in Item.GetFileList():
+ CheckDict = {'Filename': File.GetURI(), }
+ IsRequiredItemListNull(CheckDict, XmlTreeLevel)
+
+## ValidatePackageSurfaceArea
+#
+# Check if any required item is missing in PackageSurfaceArea
+#
+# @param Package: The PackageSurfaceArea to be checked
+#
+def ValidatePackageSurfaceArea(Package):
+ ValidatePS1(Package)
+ ValidatePS2(Package)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py
new file mode 100755
index 00000000..4d495ae9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py
@@ -0,0 +1,95 @@
+## @file
+# This file is used to parse an XML file of a .PKG file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+XmlParserMisc
+'''
+from Object.POM.CommonObject import TextObject
+from Logger.StringTable import ERR_XML_PARSER_REQUIRED_ITEM_MISSING
+from Logger.ToolError import PARSER_ERROR
+import Logger.Log as Logger
+
+## ConvertVariableName()
+# Convert VariableName to the L"string" form.
+# Input in UCS-2 hex-array format or already in C-style L"string" format is
+# converted successfully; anything else is rejected.
+#
+# @param VariableName: the string to be converted
+# @retval: the L-quoted string on success, otherwise None
+#
+def ConvertVariableName(VariableName):
+ VariableName = VariableName.strip()
+ #
+ # check for L quoted string
+ #
+ if VariableName.startswith('L"') and VariableName.endswith('"'):
+ return VariableName
+
+ #
+    # check for a hex array: it must be an even-length sequence of little-endian UCS-2 hex bytes
+ #
+ ValueList = VariableName.split(' ')
+ if len(ValueList)%2 == 1:
+ return None
+
+    TransferredStr = ''
+    Index = 0
+    while Index < len(ValueList):
+        FirstByte = int(ValueList[Index], 16)
+        SecondByte = int(ValueList[Index + 1], 16)
+        if SecondByte != 0:
+            return None
+
+        if FirstByte not in range(0x20, 0x7F):
+            return None
+        TransferredStr += chr(FirstByte)
+        Index += 2
+
+    return 'L"' + TransferredStr + '"'
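+
+# A minimal usage sketch (inputs are illustrative, not from a real .PKG file):
+#
+#   ConvertVariableName('L"Setup"')                       # -> 'L"Setup"'
+#   ConvertVariableName('53 00 65 00 74 00 75 00 70 00')  # -> 'L"Setup"'
+#   ConvertVariableName('53 65')                          # -> None (second byte of each UCS-2 pair must be zero)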
+
+## IsRequiredItemListNull
+#
+# Check if a required XML section item/attribute is NULL
+#
+# @param ItemDict: The dict of items to be checked, keyed by item name
+# @param XmlTreeLevel: The XML tree level used in the error message
+#
+def IsRequiredItemListNull(ItemDict, XmlTreeLevel):
+ for Key in ItemDict:
+ if not ItemDict[Key]:
+            Msg = "->".join(XmlTreeLevel)
+ ErrorMsg = ERR_XML_PARSER_REQUIRED_ITEM_MISSING % (Key, Msg)
+ Logger.Error('\nUPT', PARSER_ERROR, ErrorMsg, RaiseError=True)
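+
+# Usage sketch (hypothetical values): an empty required value raises a
+# PARSER_ERROR through Logger.Error; non-empty values pass silently.
+#
+#   IsRequiredItemListNull({'CName': 'gEfiFooGuid'}, ['DistributionPackage', 'PackageSurfaceArea'])  # no error
+#   IsRequiredItemListNull({'CName': ''}, ['DistributionPackage', 'PackageSurfaceArea'])             # raises: "CName" missing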
+
+## Get help text list
+#
+# Convert a list of parsed help-text entries into TextObject instances.
+#
+# @param HelpText: list of entries carrying Lang and HelpText attributes
+# @retval list of TextObject
+#
+def GetHelpTextList(HelpText):
+ HelpTextList = []
+ for HelT in HelpText:
+ HelpTextObj = TextObject()
+ HelpTextObj.SetLang(HelT.Lang)
+ HelpTextObj.SetString(HelT.HelpText)
+ HelpTextList.append(HelpTextObj)
+ return HelpTextList
+
+## Get prompt text list
+#
+# Convert a list of parsed prompt entries into TextObject instances.
+#
+# @param Prompt: list of entries carrying Lang and Prompt attributes
+# @retval list of TextObject
+#
+def GetPromptList(Prompt):
+ PromptList = []
+ for SubPrompt in Prompt:
+ PromptObj = TextObject()
+ PromptObj.SetLang(SubPrompt.Lang)
+ PromptObj.SetString(SubPrompt.Prompt)
+ PromptList.append(PromptObj)
+ return PromptList
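+
+# Both helpers above follow the same pattern: each input entry carries a Lang
+# attribute plus a text attribute, and each output element is a TextObject.
+# A sketch with a stand-in entry type (the namedtuple is illustrative only):
+#
+#   from collections import namedtuple
+#   Help = namedtuple('Help', ['Lang', 'HelpText'])
+#   Objs = GetHelpTextList([Help('en-US', 'Enable feature X')])
+#   # Objs[0] is a TextObject with language 'en-US' and string 'Enable feature X'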
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/__init__.py
new file mode 100644
index 00000000..a800a619
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/__init__.py
@@ -0,0 +1,14 @@
+## @file
+# Python 'Xml' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+Xml
+'''
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/BuildClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/BuildClassObject.py
new file mode 100755
index 00000000..a4be167d
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/BuildClassObject.py
@@ -0,0 +1,646 @@
+## @file
+# This file is used to define each component of the build database
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from collections import OrderedDict, namedtuple
+from Common.DataType import *
+import collections
+import re
+from Common.Misc import CopyDict,ArrayIndex
+import copy
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import OPTION_VALUE_INVALID
+from Common.caching import cached_property
+StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_\[\]]*$')
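+# StructPattern accepts C-identifier-like type names, optionally with an array
+# suffix: StructPattern.match('MY_STRUCT') and StructPattern.match('UINT8[2]')
+# succeed, while StructPattern.match('8BAD') returns None (a leading digit is
+# not allowed).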
+
+## PcdClassObject
+#
+# This Class is used for PcdObject
+#
+# @param object: Inherited from object class
+# @param Name: Input value for Name of Pcd, default is None
+# @param Guid: Input value for Guid of Pcd, default is None
+# @param Type: Input value for Type of Pcd, default is None
+# @param DatumType: Input value for DatumType of Pcd, default is None
+# @param Value: Input value for Value of Pcd, default is None
+# @param Token: Input value for Token of Pcd, default is None
+# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
+# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
+# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
+# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
+#
+# @var TokenCName: To store value for TokenCName
+# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
+# @var Type: To store value for Type
+# @var DatumType: To store value for DatumType
+# @var TokenValue: To store value for TokenValue
+# @var MaxDatumSize: To store value for MaxDatumSize
+# @var SkuInfoList: To store value for SkuInfoList
+# @var IsOverrided: To store value for IsOverrided
+# @var Phase: To store value for Phase, default is "DXE"
+#
+class PcdClassObject(object):
+ def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = None, IsOverrided = False, GuidValue = None, validateranges = None, validlists = None, expressions = None, IsDsc = False, UserDefinedDefaultStoresFlag = False):
+ self.TokenCName = Name
+ self.TokenSpaceGuidCName = Guid
+ self.TokenSpaceGuidValue = GuidValue
+ self.Type = Type
+ self._DatumType = DatumType
+ self.DefaultValue = Value
+ self.TokenValue = Token
+ self.MaxDatumSize = MaxDatumSize
+ self.MaxSizeUserSet = None
+ self.SkuInfoList = SkuInfoList if SkuInfoList is not None else OrderedDict()
+ self.Phase = "DXE"
+ self.Pending = False
+ self.IsOverrided = IsOverrided
+ self.IsFromBinaryInf = False
+ self.IsFromDsc = False
+ self.validateranges = validateranges if validateranges is not None else []
+ self.validlists = validlists if validlists is not None else []
+ self.expressions = expressions if expressions is not None else []
+ self.DscDefaultValue = None
+ self.DscRawValue = {}
+ self.DscRawValueInfo = {}
+ if IsDsc:
+ self.DscDefaultValue = Value
+ self.PcdValueFromComm = ""
+ self.PcdValueFromFdf = ""
+        self.PcdValueFromComponents = {}  # {ModuleGuid: (value, file_path, lineNo)}
+ self.CustomAttribute = {}
+ self.UserDefinedDefaultStoresFlag = UserDefinedDefaultStoresFlag
+ self._Capacity = None
+
+ @property
+ def Capacity(self):
+ if self._Capacity is None:
+ self._Capacity = []
+ dimension = ArrayIndex.findall(self._DatumType)
+ for item in dimension:
+ maxsize = item.lstrip("[").rstrip("]").strip()
+ if not maxsize:
+ maxsize = "-1"
+ maxsize = str(int(maxsize,16)) if maxsize.startswith(("0x","0X")) else maxsize
+ self._Capacity.append(maxsize)
+            if hasattr(self, "SkuOverrideValues"):
+                for sku in self.SkuOverrideValues:
+                    for defaultstore in self.SkuOverrideValues[sku]:
+                        fields = self.SkuOverrideValues[sku][defaultstore]
+                        for dimensionattr in fields:
+                            fieldinfo = fields[dimensionattr]
+                            dims = ArrayIndex.findall(dimensionattr)
+                            for i in range(len(dims)):
+                                if int(dims[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
+                                    if self._Capacity[i] != "-1":
+                                        firstfieldinfo = list(fieldinfo.values())[0]
+                                        EdkLogger.error('Build', OPTION_VALUE_INVALID, "For Pcd %s, the array index exceeds the array size. From %s Line %s\n" %
+                                                        (".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1], firstfieldinfo[2]))
+            if hasattr(self, "DefaultValues"):
+                for dimensionattr in self.DefaultValues:
+                    fieldinfo = self.DefaultValues[dimensionattr]
+                    dims = ArrayIndex.findall(dimensionattr)
+                    for i in range(len(dims)):
+                        if int(dims[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
+                            if self._Capacity[i] != "-1":
+                                firstfieldinfo = list(fieldinfo.values())[0]
+                                EdkLogger.error('Build', OPTION_VALUE_INVALID, "For Pcd %s, the array index exceeds the array size. From %s Line %s\n" %
+                                                (".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1], firstfieldinfo[2]))
+ return self._Capacity
+
+ def PcdArraySize(self):
+ if self.Capacity[-1] == "-1":
+ return -1
+ size = 1
+ for de in self.Capacity:
+ size = size * int(de)
+ return size
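+
+    # Capacity/PcdArraySize sketch (assuming ArrayIndex extracts the bracketed
+    # indexes from the datum type):
+    #   DatumType 'UINT8[2][3]' -> Capacity ['2', '3'] -> PcdArraySize() == 6
+    #   DatumType 'UINT8[]'     -> Capacity ['-1']     -> PcdArraySize() == -1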
+ @property
+ def DatumType(self):
+ return self._DatumType
+
+ @DatumType.setter
+ def DatumType(self,DataType):
+ self._DatumType = DataType
+ self._Capacity = None
+
+ @property
+ def BaseDatumType(self):
+ if self.IsArray():
+ return self._DatumType[:self._DatumType.index("[")]
+ else:
+ return self._DatumType
+    def IsArray(self):
+        return bool(self.Capacity)
+
+ def IsAggregateDatumType(self):
+ if self.DatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ return False
+ if self.IsArray() or StructPattern.match(self.DatumType):
+ return True
+ return False
+
+ def IsSimpleTypeArray(self):
+ if self.IsArray() and self.BaseDatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, "BOOLEAN"]:
+ return True
+ return False
+
+ @staticmethod
+ def GetPcdMaxSizeWorker(PcdString, MaxSize):
+ if PcdString.startswith("{") and PcdString.endswith("}"):
+ return max([len(PcdString.split(",")),MaxSize])
+
+ if PcdString.startswith("\"") or PcdString.startswith("\'"):
+ return max([len(PcdString)-2+1,MaxSize])
+
+ if PcdString.startswith("L\""):
+ return max([2*(len(PcdString)-3+1),MaxSize])
+
+ return max([len(PcdString),MaxSize])
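+
+    # Worker sketch (hypothetical literals): byte arrays count elements, ASCII
+    # strings count characters plus a NUL, wide strings count two bytes per
+    # character plus a wide NUL.
+    #   PcdClassObject.GetPcdMaxSizeWorker('{0x1, 0x2, 0x3}', 0)  # -> 3
+    #   PcdClassObject.GetPcdMaxSizeWorker('"abc"', 0)            # -> 4
+    #   PcdClassObject.GetPcdMaxSizeWorker('L"ab"', 0)            # -> 6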
+
+ ## Get the maximum number of bytes
+ def GetPcdMaxSize(self):
+ if self.DatumType in TAB_PCD_NUMERIC_TYPES:
+ return MAX_SIZE_TYPE[self.DatumType]
+
+ MaxSize = int(self.MaxDatumSize, 10) if self.MaxDatumSize else 0
+ if self.PcdValueFromFdf:
+ MaxSize = self.GetPcdMaxSizeWorker(self.PcdValueFromFdf,MaxSize)
+ if self.PcdValueFromComm:
+ MaxSize = self.GetPcdMaxSizeWorker(self.PcdValueFromComm,MaxSize)
+ if hasattr(self, "DefaultValueFromDec"):
+ MaxSize = self.GetPcdMaxSizeWorker(self.DefaultValueFromDec,MaxSize)
+ return MaxSize
+
+ ## Get the number of bytes
+ def GetPcdSize(self):
+ if self.DatumType in TAB_PCD_NUMERIC_TYPES:
+ return MAX_SIZE_TYPE[self.DatumType]
+ if not self.DefaultValue:
+ return 1
+ elif self.DefaultValue[0] == 'L':
+ return (len(self.DefaultValue) - 2) * 2
+ elif self.DefaultValue[0] == '{':
+ return len(self.DefaultValue.split(','))
+ else:
+ return len(self.DefaultValue) - 1
+
+
+ ## Convert the class to a string
+ #
+ # Convert each member of the class to string
+ # Organize to a single line format string
+ #
+ # @retval Rtn Formatted String
+ #
+ def __str__(self):
+ Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
+ 'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
+ 'Type=' + str(self.Type) + ', ' + \
+ 'DatumType=' + str(self.DatumType) + ', ' + \
+ 'DefaultValue=' + str(self.DefaultValue) + ', ' + \
+ 'TokenValue=' + str(self.TokenValue) + ', ' + \
+ 'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
+ for Item in self.SkuInfoList.values():
+ Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
+ Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
+
+ return Rtn
+
+ ## Override __eq__ function
+ #
+ # Check whether pcds are the same
+ #
+ # @retval False The two pcds are different
+ # @retval True The two pcds are the same
+ #
+ def __eq__(self, Other):
+ return Other and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
+
+ ## Override __hash__ function
+ #
+ # Use (TokenCName, TokenSpaceGuidCName) as key in hash table
+ #
+    # @retval tuple() Key for hash table
+ #
+ def __hash__(self):
+ return hash((self.TokenCName, self.TokenSpaceGuidCName))
+
+ @cached_property
+ def _fullname(self):
+ return ".".join((self.TokenSpaceGuidCName,self.TokenCName))
+
+ def __lt__(self,pcd):
+ return self._fullname < pcd._fullname
+ def __gt__(self,pcd):
+ return self._fullname > pcd._fullname
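+
+    # Ordering sketch: comparisons use the "TokenSpaceGuidCName.TokenCName"
+    # string, so (names are illustrative):
+    #   a = PcdClassObject(Name='PcdA', Guid='gTokenSpaceGuid')
+    #   b = PcdClassObject(Name='PcdB', Guid='gTokenSpaceGuid')
+    #   sorted([b, a])[0] is a   # 'gTokenSpaceGuid.PcdA' sorts first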
+
+ def sharedcopy(self,new_pcd):
+ new_pcd.TokenCName = self.TokenCName
+ new_pcd.TokenSpaceGuidCName = self.TokenSpaceGuidCName
+ new_pcd.TokenSpaceGuidValue = self.TokenSpaceGuidValue
+ new_pcd.Type = self.Type
+ new_pcd.DatumType = self.DatumType
+ new_pcd.DefaultValue = self.DefaultValue
+ new_pcd.TokenValue = self.TokenValue
+ new_pcd.MaxDatumSize = self.MaxDatumSize
+ new_pcd.MaxSizeUserSet = self.MaxSizeUserSet
+
+ new_pcd.Phase = self.Phase
+ new_pcd.Pending = self.Pending
+ new_pcd.IsOverrided = self.IsOverrided
+ new_pcd.IsFromBinaryInf = self.IsFromBinaryInf
+ new_pcd.IsFromDsc = self.IsFromDsc
+ new_pcd.PcdValueFromComm = self.PcdValueFromComm
+ new_pcd.PcdValueFromFdf = self.PcdValueFromFdf
+ new_pcd.UserDefinedDefaultStoresFlag = self.UserDefinedDefaultStoresFlag
+ new_pcd.DscRawValue = self.DscRawValue
+ new_pcd.DscRawValueInfo = self.DscRawValueInfo
+ new_pcd.CustomAttribute = self.CustomAttribute
+ new_pcd.validateranges = [item for item in self.validateranges]
+ new_pcd.validlists = [item for item in self.validlists]
+ new_pcd.expressions = [item for item in self.expressions]
+ new_pcd.SkuInfoList = {key: copy.deepcopy(skuobj) for key,skuobj in self.SkuInfoList.items()}
+ return new_pcd
+
+ def __deepcopy__(self,memo):
+ new_pcd = PcdClassObject()
+ self.sharedcopy(new_pcd)
+ return new_pcd
+
+class StructurePcd(PcdClassObject):
+ def __init__(self, StructuredPcdIncludeFile=None, Packages=None, Name=None, Guid=None, Type=None, DatumType=None, Value=None, Token=None, MaxDatumSize=None, SkuInfoList=None, IsOverrided=False, GuidValue=None, validateranges=None, validlists=None, expressions=None,default_store = TAB_DEFAULT_STORES_DEFAULT):
+ if SkuInfoList is None:
+ SkuInfoList = {}
+ if validateranges is None:
+ validateranges = []
+ if validlists is None:
+ validlists = []
+ if expressions is None:
+ expressions = []
+ if Packages is None:
+ Packages = []
+ super(StructurePcd, self).__init__(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, IsOverrided, GuidValue, validateranges, validlists, expressions)
+ self.StructuredPcdIncludeFile = [] if StructuredPcdIncludeFile is None else StructuredPcdIncludeFile
+ self.PackageDecs = Packages
+ self.DefaultStoreName = [default_store]
+ self.DefaultValues = OrderedDict()
+ self.PcdMode = None
+ self.SkuOverrideValues = OrderedDict()
+ self.StructName = None
+ self.PcdDefineLineNo = 0
+ self.PkgPath = ""
+ self.DefaultValueFromDec = ""
+ self.DefaultValueFromDecInfo = None
+ self.ValueChain = set()
+ self.PcdFieldValueFromComm = OrderedDict()
+ self.PcdFieldValueFromFdf = OrderedDict()
+ self.DefaultFromDSC=None
+ self.PcdFiledValueFromDscComponent = OrderedDict()
+
+    def __repr__(self):
+        return self._fullname
+
+ def AddDefaultValue (self, FieldName, Value, FileName="", LineNo=0,DimensionAttr ="-1"):
+ if DimensionAttr not in self.DefaultValues:
+ self.DefaultValues[DimensionAttr] = collections.OrderedDict()
+ if FieldName in self.DefaultValues[DimensionAttr]:
+ del self.DefaultValues[DimensionAttr][FieldName]
+ self.DefaultValues[DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
+ return self.DefaultValues[DimensionAttr][FieldName]
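+
+    # AddDefaultValue sketch (hypothetical field): values are keyed first by
+    # the dimension attribute ("-1" for non-array fields), then by field name;
+    # re-adding a field moves it to the end of the ordered dict.
+    #   pcd.AddDefaultValue('Field1', '0x1', 'Pkg.dec', 10)
+    #   pcd.DefaultValues['-1']['Field1']   # -> ['0x1', 'Pkg.dec', 10]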
+
+ def SetDecDefaultValue(self, DefaultValue,decpath=None,lineno=None):
+ self.DefaultValueFromDec = DefaultValue
+ self.DefaultValueFromDecInfo = (decpath,lineno)
+ def AddOverrideValue (self, FieldName, Value, SkuName, DefaultStoreName, FileName="", LineNo=0, DimensionAttr = '-1'):
+ if SkuName not in self.SkuOverrideValues:
+ self.SkuOverrideValues[SkuName] = OrderedDict()
+ if DefaultStoreName not in self.SkuOverrideValues[SkuName]:
+ self.SkuOverrideValues[SkuName][DefaultStoreName] = OrderedDict()
+ if DimensionAttr not in self.SkuOverrideValues[SkuName][DefaultStoreName]:
+ self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr] = collections.OrderedDict()
+ if FieldName in self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr]:
+ del self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName]
+ self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
+ return self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName]
+
+ def AddComponentOverrideValue(self,FieldName, Value, ModuleGuid, FileName="", LineNo=0, DimensionAttr = '-1'):
+ self.PcdFiledValueFromDscComponent.setdefault(ModuleGuid, OrderedDict())
+ self.PcdFiledValueFromDscComponent[ModuleGuid].setdefault(DimensionAttr,OrderedDict())
+ self.PcdFiledValueFromDscComponent[ModuleGuid][DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
+ return self.PcdFiledValueFromDscComponent[ModuleGuid][DimensionAttr][FieldName]
+
+ def SetPcdMode (self, PcdMode):
+ self.PcdMode = PcdMode
+
+ def copy(self, PcdObject):
+ self.TokenCName = PcdObject.TokenCName if PcdObject.TokenCName else self.TokenCName
+        self.TokenSpaceGuidCName = PcdObject.TokenSpaceGuidCName if PcdObject.TokenSpaceGuidCName else self.TokenSpaceGuidCName
+ self.TokenSpaceGuidValue = PcdObject.TokenSpaceGuidValue if PcdObject.TokenSpaceGuidValue else self.TokenSpaceGuidValue
+ self.Type = PcdObject.Type if PcdObject.Type else self.Type
+ self._DatumType = PcdObject.DatumType if PcdObject.DatumType else self.DatumType
+ self.DefaultValue = PcdObject.DefaultValue if PcdObject.DefaultValue else self.DefaultValue
+ self.TokenValue = PcdObject.TokenValue if PcdObject.TokenValue else self.TokenValue
+ self.MaxDatumSize = PcdObject.MaxDatumSize if PcdObject.MaxDatumSize else self.MaxDatumSize
+ self.SkuInfoList = PcdObject.SkuInfoList if PcdObject.SkuInfoList else self.SkuInfoList
+ self.Phase = PcdObject.Phase if PcdObject.Phase else self.Phase
+ self.Pending = PcdObject.Pending if PcdObject.Pending else self.Pending
+ self.IsOverrided = PcdObject.IsOverrided if PcdObject.IsOverrided else self.IsOverrided
+ self.IsFromBinaryInf = PcdObject.IsFromBinaryInf if PcdObject.IsFromBinaryInf else self.IsFromBinaryInf
+ self.IsFromDsc = PcdObject.IsFromDsc if PcdObject.IsFromDsc else self.IsFromDsc
+ self.validateranges = PcdObject.validateranges if PcdObject.validateranges else self.validateranges
+ self.validlists = PcdObject.validlists if PcdObject.validlists else self.validlists
+ self.expressions = PcdObject.expressions if PcdObject.expressions else self.expressions
+ self.DscRawValue = PcdObject.DscRawValue if PcdObject.DscRawValue else self.DscRawValue
+ self.DscRawValueInfo = PcdObject.DscRawValueInfo if PcdObject.DscRawValueInfo else self.DscRawValueInfo
+ self.PcdValueFromComm = PcdObject.PcdValueFromComm if PcdObject.PcdValueFromComm else self.PcdValueFromComm
+ self.PcdValueFromFdf = PcdObject.PcdValueFromFdf if PcdObject.PcdValueFromFdf else self.PcdValueFromFdf
+ self.CustomAttribute = PcdObject.CustomAttribute if PcdObject.CustomAttribute else self.CustomAttribute
+ self.UserDefinedDefaultStoresFlag = PcdObject.UserDefinedDefaultStoresFlag if PcdObject.UserDefinedDefaultStoresFlag else self.UserDefinedDefaultStoresFlag
+ if isinstance(PcdObject, StructurePcd):
+ self.StructuredPcdIncludeFile = PcdObject.StructuredPcdIncludeFile if PcdObject.StructuredPcdIncludeFile else self.StructuredPcdIncludeFile
+ self.PackageDecs = PcdObject.PackageDecs if PcdObject.PackageDecs else self.PackageDecs
+ self.DefaultValues = PcdObject.DefaultValues if PcdObject.DefaultValues else self.DefaultValues
+ self.PcdMode = PcdObject.PcdMode if PcdObject.PcdMode else self.PcdMode
+ self.DefaultValueFromDec = PcdObject.DefaultValueFromDec if PcdObject.DefaultValueFromDec else self.DefaultValueFromDec
+ self.DefaultValueFromDecInfo = PcdObject.DefaultValueFromDecInfo if PcdObject.DefaultValueFromDecInfo else self.DefaultValueFromDecInfo
+ self.SkuOverrideValues = PcdObject.SkuOverrideValues if PcdObject.SkuOverrideValues else self.SkuOverrideValues
+ self.StructName = PcdObject.DatumType if PcdObject.DatumType else self.StructName
+ self.PcdDefineLineNo = PcdObject.PcdDefineLineNo if PcdObject.PcdDefineLineNo else self.PcdDefineLineNo
+ self.PkgPath = PcdObject.PkgPath if PcdObject.PkgPath else self.PkgPath
+ self.ValueChain = PcdObject.ValueChain if PcdObject.ValueChain else self.ValueChain
+ self.PcdFieldValueFromComm = PcdObject.PcdFieldValueFromComm if PcdObject.PcdFieldValueFromComm else self.PcdFieldValueFromComm
+ self.PcdFieldValueFromFdf = PcdObject.PcdFieldValueFromFdf if PcdObject.PcdFieldValueFromFdf else self.PcdFieldValueFromFdf
+ self.PcdFiledValueFromDscComponent = PcdObject.PcdFiledValueFromDscComponent if PcdObject.PcdFiledValueFromDscComponent else self.PcdFiledValueFromDscComponent
+
+ def __deepcopy__(self,memo):
+ new_pcd = StructurePcd()
+ self.sharedcopy(new_pcd)
+
+ new_pcd.DefaultValueFromDec = self.DefaultValueFromDec
+ new_pcd.DefaultValueFromDecInfo = self.DefaultValueFromDecInfo
+ new_pcd.PcdMode = self.PcdMode
+ new_pcd.StructName = self.DatumType
+ new_pcd.PcdDefineLineNo = self.PcdDefineLineNo
+ new_pcd.PkgPath = self.PkgPath
+ new_pcd.StructuredPcdIncludeFile = [item for item in self.StructuredPcdIncludeFile]
+ new_pcd.PackageDecs = [item for item in self.PackageDecs]
+ new_pcd.DefaultValues = CopyDict(self.DefaultValues)
+ new_pcd.DefaultFromDSC=CopyDict(self.DefaultFromDSC)
+ new_pcd.SkuOverrideValues = CopyDict(self.SkuOverrideValues)
+ new_pcd.PcdFieldValueFromComm = CopyDict(self.PcdFieldValueFromComm)
+ new_pcd.PcdFieldValueFromFdf = CopyDict(self.PcdFieldValueFromFdf)
+ new_pcd.PcdFiledValueFromDscComponent = CopyDict(self.PcdFiledValueFromDscComponent)
+ new_pcd.ValueChain = {item for item in self.ValueChain}
+ return new_pcd
+
+LibraryClassObject = namedtuple('LibraryClassObject', ['LibraryClass','SupModList'])
+
+## ModuleBuildClassObject
+#
+# This Class defines ModuleBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for module meta file path
+# @var BaseName: To store value for BaseName
+# @var ModuleType: To store value for ModuleType
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var PcdIsDriver: To store value for PcdIsDriver
+# @var BinaryModule: To store value for BinaryModule
+# @var CustomMakefile: To store value for CustomMakefile
+# @var Specification: To store value for Specification
+# @var Shadow: To store value for Shadow
+# @var LibraryClass: To store value for LibraryClass, it is a list structure as
+# [ LibraryClassObject, ...]
+# @var ModuleEntryPointList: To store value for ModuleEntryPointList
+# @var ModuleUnloadImageList: To store value for ModuleUnloadImageList
+# @var ConstructorList: To store value for ConstructorList
+# @var DestructorList: To store value for DestructorList
+# @var Binaries: To store value for Binaries, it is a list structure as
+# [ ModuleBinaryClassObject, ...]
+# @var Sources: To store value for Sources, it is a list structure as
+# [ ModuleSourceFilesClassObject, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { [LibraryClassName, ModuleType] : LibraryClassInfFile }
+# @var Protocols: To store value for Protocols, it is a list structure as
+# [ ProtocolName, ... ]
+# @var Ppis: To store value for Ppis, it is a list structure as
+# [ PpiName, ... ]
+# @var Guids: To store value for Guids, it is a list structure as
+# [ GuidName, ... ]
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludePath, ... ]
+# @var Packages: To store value for Packages, it is a list structure as
+# [ DecFileName, ... ]
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
+# @var BuildOptions: To store value for BuildOptions, it is a set structure as
+# { [BuildOptionKey] : BuildOptionValue}
+# @var Depex: To store value for Depex
+#
+class ModuleBuildClassObject(object):
+ def __init__(self):
+ self.AutoGenVersion = 0
+ self.MetaFile = ''
+ self.BaseName = ''
+ self.ModuleType = ''
+ self.Guid = ''
+ self.Version = ''
+ self.PcdIsDriver = ''
+ self.BinaryModule = ''
+ self.Shadow = ''
+ self.CustomMakefile = {}
+ self.Specification = {}
+ self.LibraryClass = []
+ self.ModuleEntryPointList = []
+ self.ModuleUnloadImageList = []
+ self.ConstructorList = []
+ self.DestructorList = []
+
+ self.Binaries = []
+ self.Sources = []
+ self.LibraryClasses = OrderedDict()
+ self.Libraries = []
+ self.Protocols = []
+ self.Ppis = []
+ self.Guids = []
+ self.Includes = []
+ self.Packages = []
+ self.Pcds = {}
+ self.BuildOptions = {}
+ self.Depex = {}
+ self.StrPcdSet = []
+ self.StrPcdOverallValue = {}
+
+ ## Convert the class to a string
+ #
+ # Convert member MetaFile of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## Override __eq__ function
+ #
+ # Check whether ModuleBuildClassObjects are the same
+ #
+ # @retval False The two ModuleBuildClassObjects are different
+ # @retval True The two ModuleBuildClassObjects are the same
+ #
+ def __eq__(self, Other):
+ return self.MetaFile == Other
+
+ ## Override __hash__ function
+ #
+ # Use MetaFile as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+## PackageBuildClassObject
+#
+# This Class defines PackageBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for package meta file path
+# @var PackageName: To store value for PackageName
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var Protocols: To store value for Protocols, it is a set structure as
+# { [ProtocolName] : Protocol Guid, ... }
+# @var Ppis: To store value for Ppis, it is a set structure as
+# { [PpiName] : Ppi Guid, ... }
+# @var Guids: To store value for Guids, it is a set structure as
+# { [GuidName] : Guid, ... }
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludePath, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { [LibraryClassName] : LibraryClassInfFile }
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
+#
+class PackageBuildClassObject(object):
+ def __init__(self):
+ self.MetaFile = ''
+ self.PackageName = ''
+ self.Guid = ''
+ self.Version = ''
+
+ self.Protocols = {}
+ self.Ppis = {}
+ self.Guids = {}
+ self.Includes = []
+ self.LibraryClasses = {}
+ self.Pcds = {}
+
+ ## Convert the class to a string
+ #
+ # Convert member MetaFile of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## Override __eq__ function
+ #
+ # Check whether PackageBuildClassObjects are the same
+ #
+ # @retval False The two PackageBuildClassObjects are different
+ # @retval True The two PackageBuildClassObjects are the same
+ #
+ def __eq__(self, Other):
+ return self.MetaFile == Other
+
+ ## Override __hash__ function
+ #
+ # Use MetaFile as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+## PlatformBuildClassObject
+#
+# This Class defines PlatformBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for platform meta-file path
+# @var PlatformName: To store value for PlatformName
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var DscSpecification: To store value for DscSpecification
+# @var OutputDirectory: To store value for OutputDirectory
+# @var FlashDefinition: To store value for FlashDefinition
+# @var BuildNumber: To store value for BuildNumber
+# @var MakefileName: To store value for MakefileName
+# @var SkuIds: To store value for SkuIds, it is a set structure as
+# { 'SkuName' : SkuId, '!include' : includefilename, ...}
+# @var Modules: To store value for Modules, it is a list structure as
+# [ InfFileName, ... ]
+# @var Libraries: To store value for Libraries, it is a list structure as
+# [ InfFileName, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { (LibraryClassName, ModuleType) : LibraryClassInfFile }
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject }
+# @var BuildOptions: To store value for BuildOptions, it is a set structure as
+# { [BuildOptionKey] : BuildOptionValue }
+#
+class PlatformBuildClassObject(object):
+ def __init__(self):
+ self.MetaFile = ''
+ self.PlatformName = ''
+ self.Guid = ''
+ self.Version = ''
+ self.DscSpecification = ''
+ self.OutputDirectory = ''
+ self.FlashDefinition = ''
+ self.BuildNumber = ''
+ self.MakefileName = ''
+
+ self.SkuIds = {}
+ self.Modules = []
+ self.LibraryInstances = []
+ self.LibraryClasses = {}
+ self.Libraries = {}
+ self.Pcds = {}
+ self.BuildOptions = {}
+
+ ## Convert the class to a string
+ #
+ # Convert member MetaFile of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## Override __eq__ function
+ #
+ # Check whether PlatformBuildClassObjects are the same
+ #
+ # @retval False The two PlatformBuildClassObjects are different
+ # @retval True The two PlatformBuildClassObjects are the same
+ #
+ def __eq__(self, Other):
+ return self.MetaFile == Other
+
+ ## Override __hash__ function
+ #
+ # Use MetaFile as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DecBuildData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DecBuildData.py
new file mode 100755
index 00000000..78377286
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DecBuildData.py
@@ -0,0 +1,475 @@
+## @file
+# This file is used to create a database used by build tool
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from Common.StringUtils import *
+from Common.DataType import *
+from Common.Misc import *
+from types import *
+from collections import OrderedDict
+from CommonDataClass.DataClass import *
+from Workspace.BuildClassObject import PackageBuildClassObject, StructurePcd, PcdClassObject
+from Common.GlobalData import gGlobalDefines
+from re import compile
+
+## Platform build information from DEC file
+#
+# This class is used to retrieve information stored in database and convert them
+# into PackageBuildClassObject form for easier use for AutoGen.
+#
+class DecBuildData(PackageBuildClassObject):
+ # dict used to convert PCD type in database to string used by build tool
+ _PCD_TYPE_STRING_ = {
+ MODEL_PCD_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE : TAB_PCDS_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG : TAB_PCDS_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC : TAB_PCDS_DYNAMIC,
+ MODEL_PCD_DYNAMIC_DEFAULT : TAB_PCDS_DYNAMIC,
+ MODEL_PCD_DYNAMIC_HII : TAB_PCDS_DYNAMIC_HII,
+ MODEL_PCD_DYNAMIC_VPD : TAB_PCDS_DYNAMIC_VPD,
+ MODEL_PCD_DYNAMIC_EX : TAB_PCDS_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : TAB_PCDS_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC_EX_HII : TAB_PCDS_DYNAMIC_EX_HII,
+ MODEL_PCD_DYNAMIC_EX_VPD : TAB_PCDS_DYNAMIC_EX_VPD,
+ }
+
+ # dict used to convert part of [Defines] to members of DecBuildData directly
+ _PROPERTY_ = {
+ #
+ # Required Fields
+ #
+ TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
+ TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
+ TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
+ TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
+ }
+
+
+ ## Constructor of DecBuildData
+ #
+ # Initialize object of DecBuildData
+ #
+    # @param File The path of the package description file
+    # @param RawData The raw data of the DEC file
+    # @param BuildDataBase Database used to retrieve module information
+    # @param Arch The target architecture
+    # @param Target The build target (not used for DecBuildData)
+    # @param Toolchain The tool chain name (not used for DecBuildData)
+ #
+ def __init__(self, File, RawData, BuildDataBase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
+ self.MetaFile = File
+ self._PackageDir = File.Dir
+ self._RawData = RawData
+ self._Bdb = BuildDataBase
+ self._Arch = Arch
+ self._Target = Target
+ self._Toolchain = Toolchain
+ self._Clear()
+
+ ## XXX[key] = value
+ def __setitem__(self, key, value):
+ self.__dict__[self._PROPERTY_[key]] = value
+
+ ## value = XXX[key]
+ def __getitem__(self, key):
+ return self.__dict__[self._PROPERTY_[key]]
+
+ ## "in" test support
+ def __contains__(self, key):
+ return key in self._PROPERTY_
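+
+    # The mapping protocol above routes [Defines] keys to private members via
+    # _PROPERTY_, e.g. (sketch):
+    #   dec[TAB_DEC_DEFINES_PACKAGE_VERSION] = '1.0'   # sets dec._Version
+    #   TAB_DEC_DEFINES_PACKAGE_NAME in dec            # -> True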
+
+ ## Set all internal used members of DecBuildData to None
+ def _Clear(self):
+ self._Header = None
+ self._PackageName = None
+ self._Guid = None
+ self._Version = None
+ self._PkgUniFile = None
+ self._Protocols = None
+ self._Ppis = None
+ self._Guids = None
+ self._Includes = None
+ self._CommonIncludes = None
+ self._LibraryClasses = None
+ self._Pcds = None
+ self._MacroDict = None
+ self._PrivateProtocols = None
+ self._PrivatePpis = None
+ self._PrivateGuids = None
+ self._PrivateIncludes = None
+
+ ## Get current effective macros
+ @property
+ def _Macros(self):
+ if self._MacroDict is None:
+ self._MacroDict = dict(gGlobalDefines)
+ return self._MacroDict
+
+ ## Get architecture
+ @property
+ def Arch(self):
+ return self._Arch
+
+ ## Retrieve all information in [Defines] section
+ #
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
+ #
+ def _GetHeaderInfo(self):
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
+ for Record in RecordList:
+ Name = Record[1]
+ if Name in self:
+ self[Name] = Record[2]
+ self._Header = 'DUMMY'
+
+ ## Retrieve package name
+ @property
+ def PackageName(self):
+ if self._PackageName is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._PackageName is None:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
+ return self._PackageName
+
+ ## Retrieve file guid
+ @property
+    def Guid(self):
+ if self._Guid is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._Guid is None:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
+ return self._Guid
+
+ ## Retrieve package version
+ @property
+ def Version(self):
+ if self._Version is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._Version is None:
+ self._Version = ''
+ return self._Version
+
+ ## Retrieve protocol definitions (name/value pairs)
+ @property
+ def Protocols(self):
+ if self._Protocols is None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # protocol definition for given ARCH
+ #
+ ProtocolDict = tdict(True)
+ PrivateProtocolDict = tdict(True)
+ NameList = []
+ PrivateNameList = []
+ PublicNameList = []
+ # find out all protocol definitions for specific and 'common' arch
+ RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch]
+ for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
+ if PrivateFlag == 'PRIVATE':
+ if Name not in PrivateNameList:
+ PrivateNameList.append(Name)
+ PrivateProtocolDict[Arch, Name] = Guid
+ if Name in PublicNameList:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
+ else:
+ if Name not in PublicNameList:
+ PublicNameList.append(Name)
+ if Name in PrivateNameList:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
+ if Name not in NameList:
+ NameList.append(Name)
+ ProtocolDict[Arch, Name] = Guid
+ # use OrderedDict to keep the order
+ self._Protocols = OrderedDict()
+ self._PrivateProtocols = OrderedDict()
+ for Name in NameList:
+ #
+ # limit the ARCH to self._Arch, if no self._Arch found, tdict
+ # will automatically turn to 'common' ARCH for trying
+ #
+ self._Protocols[Name] = ProtocolDict[self._Arch, Name]
+ for Name in PrivateNameList:
+ self._PrivateProtocols[Name] = PrivateProtocolDict[self._Arch, Name]
+ return self._Protocols
+
+ ## Retrieve PPI definitions (name/value pairs)
+ @property
+ def Ppis(self):
+ if self._Ppis is None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # PPI definition for given ARCH
+ #
+ PpiDict = tdict(True)
+ PrivatePpiDict = tdict(True)
+ NameList = []
+ PrivateNameList = []
+ PublicNameList = []
+ # find out all PPI definitions for specific arch and 'common' arch
+ RecordList = self._RawData[MODEL_EFI_PPI, self._Arch]
+ for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
+ if PrivateFlag == 'PRIVATE':
+ if Name not in PrivateNameList:
+ PrivateNameList.append(Name)
+ PrivatePpiDict[Arch, Name] = Guid
+ if Name in PublicNameList:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
+ else:
+ if Name not in PublicNameList:
+ PublicNameList.append(Name)
+ if Name in PrivateNameList:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
+ if Name not in NameList:
+ NameList.append(Name)
+ PpiDict[Arch, Name] = Guid
+ # use OrderedDict to keep the order
+ self._Ppis = OrderedDict()
+ self._PrivatePpis = OrderedDict()
+ for Name in NameList:
+ #
+ # limit the ARCH to self._Arch, if no self._Arch found, tdict
+ # will automatically turn to 'common' ARCH for trying
+ #
+ self._Ppis[Name] = PpiDict[self._Arch, Name]
+ for Name in PrivateNameList:
+ self._PrivatePpis[Name] = PrivatePpiDict[self._Arch, Name]
+ return self._Ppis
+
+ ## Retrieve GUID definitions (name/value pairs)
+ @property
+ def Guids(self):
+ if self._Guids is None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # GUID definition for given ARCH
+ #
+ GuidDict = tdict(True)
+ PrivateGuidDict = tdict(True)
+ NameList = []
+ PrivateNameList = []
+ PublicNameList = []
+ # find out all protocol definitions for specific and 'common' arch
+ RecordList = self._RawData[MODEL_EFI_GUID, self._Arch]
+ for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
+ if PrivateFlag == 'PRIVATE':
+ if Name not in PrivateNameList:
+ PrivateNameList.append(Name)
+ PrivateGuidDict[Arch, Name] = Guid
+ if Name in PublicNameList:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
+ else:
+ if Name not in PublicNameList:
+ PublicNameList.append(Name)
+ if Name in PrivateNameList:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
+ if Name not in NameList:
+ NameList.append(Name)
+ GuidDict[Arch, Name] = Guid
+ # use OrderedDict to keep the order
+ self._Guids = OrderedDict()
+ self._PrivateGuids = OrderedDict()
+ for Name in NameList:
+ #
+ # limit the ARCH to self._Arch, if no self._Arch found, tdict
+ # will automatically turn to 'common' ARCH for trying
+ #
+ self._Guids[Name] = GuidDict[self._Arch, Name]
+ for Name in PrivateNameList:
+ self._PrivateGuids[Name] = PrivateGuidDict[self._Arch, Name]
+ return self._Guids
+
+ ## Retrieve public include paths declared in this package
+ @property
+ def Includes(self):
+ if self._Includes is None or self._CommonIncludes is None:
+ self._CommonIncludes = []
+ self._Includes = []
+ self._PrivateIncludes = []
+            PublicIncludes = []
+ RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch]
+ Macros = self._Macros
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), self._PackageDir, Arch=self._Arch)
+ LineNo = Record[-1]
+ # validate the path
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+
+ # avoid duplicate include path
+ if File not in self._Includes:
+ self._Includes.append(File)
+ if Record[4] == 'PRIVATE':
+ if File not in self._PrivateIncludes:
+ self._PrivateIncludes.append(File)
+                    if File in PublicIncludes:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
+ else:
+                    if File not in PublicIncludes:
+                        PublicIncludes.append(File)
+ if File in self._PrivateIncludes:
+ EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
+ if Record[3] == TAB_COMMON:
+ self._CommonIncludes.append(File)
+ return self._Includes
+
+ ## Retrieve library class declarations (not used in build at present)
+ @property
+ def LibraryClasses(self):
+ if self._LibraryClasses is None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # library class declaration for given ARCH
+ #
+ LibraryClassDict = tdict(True)
+ LibraryClassSet = set()
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
+ Macros = self._Macros
+ for LibraryClass, File, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
+ File = PathClass(NormPath(File, Macros), self._PackageDir, Arch=self._Arch)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ LibraryClassSet.add(LibraryClass)
+ LibraryClassDict[Arch, LibraryClass] = File
+ self._LibraryClasses = OrderedDict()
+ for LibraryClass in LibraryClassSet:
+ self._LibraryClasses[LibraryClass] = LibraryClassDict[self._Arch, LibraryClass]
+ return self._LibraryClasses
+
+ ## Retrieve PCD declarations
+ @property
+ def Pcds(self):
+ if self._Pcds is None:
+ self._Pcds = OrderedDict()
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
+ return self._Pcds
+
+    def ParsePcdName(self, TokenCName):
+        TokenCName = TokenCName.strip()
+        if TokenCName.startswith("["):
+            if "." in TokenCName:
+                DimensionAttr = TokenCName[:TokenCName.index(".")]
+                Fields = TokenCName[TokenCName.index(".")+1:]
+            else:
+                DimensionAttr = TokenCName
+                Fields = ""
+        else:
+            DimensionAttr = ""
+            Fields = TokenCName
+
+        return DimensionAttr, Fields
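+
+    # ParsePcdName sketch: split an entry name into its array-dimension prefix
+    # and the field path.
+    #   self.ParsePcdName('[1].Field.Sub')  # -> ('[1]', 'Field.Sub')
+    #   self.ParsePcdName('[2]')            # -> ('[2]', '')
+    #   self.ParsePcdName('Field.Sub')      # -> ('', 'Field.Sub')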
+
+ def ProcessStructurePcd(self, StructurePcdRawDataSet):
+ s_pcd_set = OrderedDict()
+ for s_pcd, LineNo in StructurePcdRawDataSet:
+ if s_pcd.TokenSpaceGuidCName not in s_pcd_set:
+ s_pcd_set[s_pcd.TokenSpaceGuidCName] = []
+ s_pcd_set[s_pcd.TokenSpaceGuidCName].append((s_pcd, LineNo))
+
+ str_pcd_set = []
+ for pcdname in s_pcd_set:
+ dep_pkgs = []
+ struct_pcd = StructurePcd()
+ for item, LineNo in s_pcd_set[pcdname]:
+ if not item.TokenCName:
+ continue
+ if "<HeaderFiles>" in item.TokenCName:
+ struct_pcd.StructuredPcdIncludeFile.append(item.DefaultValue)
+ elif "<Packages>" in item.TokenCName:
+ dep_pkgs.append(item.DefaultValue)
+ elif item.DatumType == item.TokenCName:
+ struct_pcd.copy(item)
+ struct_pcd.TokenValue = struct_pcd.TokenValue.strip("{").strip()
+ struct_pcd.TokenSpaceGuidCName, struct_pcd.TokenCName = pcdname.split(".")
+ struct_pcd.PcdDefineLineNo = LineNo
+ struct_pcd.PkgPath = self.MetaFile.File
+ struct_pcd.SetDecDefaultValue(item.DefaultValue,self.MetaFile.File,LineNo)
+ else:
+                DimensionAttr, Fields = self.ParsePcdName(item.TokenCName)
+                struct_pcd.AddDefaultValue(Fields, item.DefaultValue, self.MetaFile.File, LineNo, DimensionAttr)
+
+ struct_pcd.PackageDecs = dep_pkgs
+ str_pcd_set.append(struct_pcd)
+ return str_pcd_set
+
+ ## Retrieve PCD declarations for given type
+ def _GetPcd(self, Type):
+ Pcds = OrderedDict()
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # PCD declaration for given ARCH
+ #
+ PcdDict = tdict(True, 3)
+ # for summarizing PCD
+ PcdSet = []
+ # find out all PCDs of the 'type'
+
+ StrPcdSet = []
+ RecordList = self._RawData[Type, self._Arch]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, PrivateFlag, Dummy1, Dummy2 in RecordList:
+ PcdDict[Arch, PcdCName, TokenSpaceGuid] = (Setting, Dummy2)
+ if not (PcdCName, TokenSpaceGuid) in PcdSet:
+ PcdSet.append((PcdCName, TokenSpaceGuid))
+
+ DefinitionPosition = {}
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ #
+ # limit the ARCH to self._Arch, if no self._Arch found, tdict
+ # will automatically turn to 'common' ARCH and try again
+ #
+ Setting, LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
+ if Setting is None:
+ continue
+
+ DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)
+ validateranges, validlists, expressions = self._RawData.GetValidExpression(TokenSpaceGuid, PcdCName)
+ PcdObj = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ DatumType,
+ DefaultValue,
+ TokenNumber,
+ '',
+ {},
+ False,
+ None,
+ list(validateranges),
+ list(validlists),
+ list(expressions)
+ )
+ DefinitionPosition[PcdObj] = (self.MetaFile.File, LineNo)
+ if "." in TokenSpaceGuid:
+ StrPcdSet.append((PcdObj, LineNo))
+ else:
+ Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdObj
+
+ StructurePcds = self.ProcessStructurePcd(StrPcdSet)
+ for pcd in StructurePcds:
+ Pcds[pcd.TokenCName, pcd.TokenSpaceGuidCName, self._PCD_TYPE_STRING_[Type]] = pcd
+ for pcd in Pcds.values():
+ if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ if not pcd.IsAggregateDatumType():
+                    EdkLogger.error('build', FORMAT_INVALID, "DatumType only supports BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", DefinitionPosition[pcd][0], DefinitionPosition[pcd][1])
+ elif not pcd.IsArray() and not pcd.StructuredPcdIncludeFile:
+                    EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "The header file for structure Pcd %s.%s is not found in %s line %s\n" % (pcd.TokenSpaceGuidCName, pcd.TokenCName, pcd.PkgPath, pcd.PcdDefineLineNo))
+ return Pcds
+
+ @property
+ def CommonIncludes(self):
+ if self._CommonIncludes is None:
+            self.Includes  # evaluated for its side effect of populating _CommonIncludes
+ return self._CommonIncludes
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DscBuildData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DscBuildData.py
new file mode 100755
index 00000000..eeefc9f5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/DscBuildData.py
@@ -0,0 +1,3588 @@
+## @file
+# This file is used to create a database used by build tool
+#
+# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
+# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Platform build information from DSC file
+#
+# This class is used to retrieve information stored in database and convert them
+# into PlatformBuildClassObject form for easier use for AutoGen.
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from Common.StringUtils import *
+from Common.DataType import *
+from Common.Misc import *
+from types import *
+from Common.Expression import *
+from CommonDataClass.CommonClass import SkuInfoClass
+from Common.TargetTxtClassObject import TargetTxtDict
+from Common.ToolDefClassObject import ToolDefDict
+from .MetaDataTable import *
+from .MetaFileTable import *
+from .MetaFileParser import *
+
+from .WorkspaceCommon import GetDeclaredPcd
+from Common.Misc import AnalyzeDscPcd
+from Common.Misc import ProcessDuplicatedInf,RemoveCComments,ArrayIndex
+import re
+from Common.Parsing import IsValidWord
+from Common.VariableAttributes import VariableAttributes
+import Common.GlobalData as GlobalData
+import subprocess
+from functools import reduce
+from Common.Misc import SaveFileOnChange
+from Workspace.BuildClassObject import PlatformBuildClassObject, StructurePcd, PcdClassObject, ModuleBuildClassObject
+from collections import OrderedDict, defaultdict
+
+def _IsFieldValueAnArray (Value):
+ Value = Value.strip()
+ if Value.startswith(TAB_GUID) and Value.endswith(')'):
+ return True
+ if Value.startswith('L"') and Value.endswith('"') and len(list(Value[2:-1])) > 1:
+ return True
+ if Value[0] == '"' and Value[-1] == '"' and len(list(Value[1:-1])) > 1:
+ return True
+ if Value[0] == '{' and Value[-1] == '}':
+ return True
+ if Value.startswith("L'") and Value.endswith("'") and len(list(Value[2:-1])) > 1:
+ return True
+ if Value[0] == "'" and Value[-1] == "'" and len(list(Value[1:-1])) > 1:
+ return True
+ return False
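+
+# Classification sketch (literal forms; the values are illustrative):
+#   _IsFieldValueAnArray('{0x01, 0x02}')  # -> True  (byte array)
+#   _IsFieldValueAnArray('L"ab"')         # -> True  (multi-character wide string)
+#   _IsFieldValueAnArray('L"a"')          # -> False (single wide character)
+#   _IsFieldValueAnArray('0x01')          # -> False (plain number)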
+
+PcdValueInitName = 'PcdValueInit'
+PcdValueCommonName = 'PcdValueCommon'
+
+PcdMainCHeader = '''
+/**
+ DO NOT EDIT
+ FILE auto-generated
+**/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <PcdValueCommon.h>
+'''
+
+PcdMainCEntry = '''
+int
+main (
+ int argc,
+ char *argv[]
+ )
+{
+ return PcdValueMain (argc, argv);
+}
+'''
+
+PcdMakefileHeader = '''
+#
+# DO NOT EDIT
+# This file is auto-generated by build utility
+#
+
+'''
+
+WindowsCFLAGS = 'CFLAGS = $(CFLAGS) /wd4200 /wd4034 /wd4101 '
+LinuxCFLAGS = 'BUILD_CFLAGS += -Wno-pointer-to-int-cast -Wno-unused-variable '
+PcdMakefileEnd = '''
+!INCLUDE $(BASE_TOOLS_PATH)\Source\C\Makefiles\ms.common
+!INCLUDE $(BASE_TOOLS_PATH)\Source\C\Makefiles\ms.app
+'''
+
+AppTarget = '''
+all: $(APPFILE)
+$(APPFILE): $(OBJECTS)
+%s
+'''
+
+PcdGccMakefile = '''
+MAKEROOT ?= $(EDK_TOOLS_PATH)/Source/C
+LIBS = -lCommon
+'''
+
+variablePattern = re.compile(r'[\t\s]*0[xX][a-fA-F0-9]+$')
+SkuIdPattern = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
+## regular expressions for finding decimal and hex numbers
+Pattern = re.compile(r'^[1-9]\d*|0$')
+HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')
+## Regular expression for finding header file inclusions
+from AutoGen.GenMake import gIncludePattern
+
+## Find dependencies for one source file
+#
+# By recursively searching for "#include" directives, find out all the files
+# a given source file depends on. The dependencies are searched only in the
+# given search path list.
+#
+# @param FileStack The initial list of files to scan, used as a work stack
+# @param SearchPathList The list of search paths
+#
+# @retval list The list of files the given source file depends on
+#
+def GetDependencyList(FileStack, SearchPathList):
+ DepDb = dict()
+ DependencySet = set(FileStack)
+ while len(FileStack) > 0:
+ F = FileStack.pop()
+ FullPathDependList = []
+ CurrentFileDependencyList = []
+ if F in DepDb:
+ CurrentFileDependencyList = DepDb[F]
+ else:
+ try:
+                Fd = open(F, 'rb')  # binary mode: the BOM check below inspects raw bytes
+ FileContent = Fd.read()
+ except BaseException as X:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F + "\n\t" + str(X))
+ finally:
+                if "Fd" in locals():
+ Fd.close()
+
+ if len(FileContent) == 0:
+ continue
+
+ try:
+ if FileContent[0] == 0xff or FileContent[0] == 0xfe:
+ FileContent = FileContent.decode('utf-16')
+ else:
+ FileContent = FileContent.decode()
+ except:
+                # The file is not a text file; for example, a .mcb file
+ continue
+ IncludedFileList = gIncludePattern.findall(FileContent)
+
+ for Inc in IncludedFileList:
+ Inc = Inc.strip()
+ Inc = os.path.normpath(Inc)
+ CurrentFileDependencyList.append(Inc)
+ DepDb[F] = CurrentFileDependencyList
+
+ CurrentFilePath = os.path.dirname(F)
+ PathList = [CurrentFilePath] + SearchPathList
+ for Inc in CurrentFileDependencyList:
+ for SearchPath in PathList:
+ FilePath = os.path.join(SearchPath, Inc)
+ if not os.path.exists(FilePath):
+ continue
+ if FilePath not in DependencySet:
+ FileStack.append(FilePath)
+ FullPathDependList.append(FilePath)
+ break
+ DependencySet.update(FullPathDependList)
+ DependencyList = list(DependencySet) # remove duplicate ones
+
+ return DependencyList
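+
+# Usage sketch (paths are hypothetical): seed the work stack with a source
+# file and collect every header reachable through nested #include directives
+# found on the search path.
+#   Deps = GetDependencyList(['/ws/Build/PcdValueInit.c'], ['/ws/MdePkg/Include'])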
+
+class DscBuildData(PlatformBuildClassObject):
+ # dict used to convert PCD type in database to string used by build tool
+ _PCD_TYPE_STRING_ = {
+ MODEL_PCD_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE : TAB_PCDS_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG : TAB_PCDS_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC : TAB_PCDS_DYNAMIC,
+ MODEL_PCD_DYNAMIC_DEFAULT : TAB_PCDS_DYNAMIC,
+ MODEL_PCD_DYNAMIC_HII : TAB_PCDS_DYNAMIC_HII,
+ MODEL_PCD_DYNAMIC_VPD : TAB_PCDS_DYNAMIC_VPD,
+ MODEL_PCD_DYNAMIC_EX : TAB_PCDS_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : TAB_PCDS_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC_EX_HII : TAB_PCDS_DYNAMIC_EX_HII,
+ MODEL_PCD_DYNAMIC_EX_VPD : TAB_PCDS_DYNAMIC_EX_VPD,
+ }
+
+ # dict used to convert part of [Defines] to members of DscBuildData directly
+ _PROPERTY_ = {
+ #
+ # Required Fields
+ #
+ TAB_DSC_DEFINES_PLATFORM_NAME : "_PlatformName",
+ TAB_DSC_DEFINES_PLATFORM_GUID : "_Guid",
+ TAB_DSC_DEFINES_PLATFORM_VERSION : "_Version",
+ TAB_DSC_DEFINES_DSC_SPECIFICATION : "_DscSpecification",
+ # TAB_DSC_DEFINES_OUTPUT_DIRECTORY : "_OutputDirectory",
+ # TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES : "_SupArchList",
+ # TAB_DSC_DEFINES_BUILD_TARGETS : "_BuildTargets",
+ TAB_DSC_DEFINES_SKUID_IDENTIFIER : "_SkuName",
+ # TAB_DSC_DEFINES_FLASH_DEFINITION : "_FlashDefinition",
+ TAB_DSC_DEFINES_BUILD_NUMBER : "_BuildNumber",
+ TAB_DSC_DEFINES_MAKEFILE_NAME : "_MakefileName",
+ TAB_DSC_DEFINES_BS_BASE_ADDRESS : "_BsBaseAddress",
+ TAB_DSC_DEFINES_RT_BASE_ADDRESS : "_RtBaseAddress",
+ # TAB_DSC_DEFINES_RFC_LANGUAGES : "_RFCLanguages",
+ # TAB_DSC_DEFINES_ISO_LANGUAGES : "_ISOLanguages",
+ }
+
+ # used to compose dummy library class name for those forced library instances
+ _NullLibraryNumber = 0
+
+ ## Constructor of DscBuildData
+ #
+ # Initialize object of DscBuildData
+ #
+ # @param FilePath The path of platform description file
+ # @param RawData The raw data of DSC file
+ # @param BuildDataBase Database used to retrieve module/package information
+ # @param Arch The target architecture
+    # @param Target The build target (not used for DscBuildData)
+    # @param Toolchain The tool chain name (not used for DscBuildData)
+ #
+ def __init__(self, FilePath, RawData, BuildDataBase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
+ self.MetaFile = FilePath
+ self._RawData = RawData
+ self._Bdb = BuildDataBase
+ self._Arch = Arch
+ self._Target = Target
+ self._Toolchain = Toolchain
+ self._ToolChainFamily = None
+ self._Clear()
+ self.WorkspaceDir = os.getenv("WORKSPACE") if os.getenv("WORKSPACE") else ""
+ self.DefaultStores = None
+ self.SkuIdMgr = SkuClass(self.SkuName, self.SkuIds)
+
+ @property
+ def OutputPath(self):
+ if os.getenv("WORKSPACE"):
+ return os.path.join(os.getenv("WORKSPACE"), self.OutputDirectory, self._Target + "_" + self._Toolchain, PcdValueInitName)
+ else:
+ return os.path.dirname(self.DscFile)
+
+ ## XXX[key] = value
+ def __setitem__(self, key, value):
+ self.__dict__[self._PROPERTY_[key]] = value
+
+ ## value = XXX[key]
+ def __getitem__(self, key):
+ return self.__dict__[self._PROPERTY_[key]]
+
+ ## "in" test support
+ def __contains__(self, key):
+ return key in self._PROPERTY_
+
+ ## Set all internal used members of DscBuildData to None
+ def _Clear(self):
+ self._Header = None
+ self._PlatformName = None
+ self._Guid = None
+ self._Version = None
+ self._DscSpecification = None
+ self._OutputDirectory = None
+ self._SupArchList = None
+ self._BuildTargets = None
+ self._SkuName = None
+ self._PcdInfoFlag = None
+ self._VarCheckFlag = None
+ self._FlashDefinition = None
+ self._Prebuild = None
+ self._Postbuild = None
+ self._BuildNumber = None
+ self._MakefileName = None
+ self._BsBaseAddress = None
+ self._RtBaseAddress = None
+ self._SkuIds = None
+ self._Modules = None
+ self._LibraryInstances = None
+ self._LibraryClasses = None
+ self._Pcds = None
+ self._DecPcds = None
+ self._BuildOptions = None
+ self._ModuleTypeOptions = None
+ self._LoadFixAddress = None
+ self._RFCLanguages = None
+ self._ISOLanguages = None
+ self._VpdToolGuid = None
+ self._MacroDict = None
+ self.DefaultStores = None
+
+ ## Get current effective macros
+ @property
+ def _Macros(self):
+ if self._MacroDict is None:
+ self._MacroDict = {}
+ self._MacroDict.update(GlobalData.gPlatformDefines)
+ self._MacroDict.update(GlobalData.gGlobalDefines)
+ self._MacroDict.update(GlobalData.gCommandLineDefines)
+ return self._MacroDict
+
+ ## Get architecture
+ @property
+ def Arch(self):
+ return self._Arch
+ @property
+ def Dir(self):
+ return self.MetaFile.Dir
+
+ ## Retrieve all information in [Defines] section
+ #
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
+ #
+ def _GetHeaderInfo(self):
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
+ for Record in RecordList:
+ Name = Record[1]
+ # items defined in _PROPERTY_ don't need additional processing
+
+ # some special items in [Defines] section need special treatment
+ if Name == TAB_DSC_DEFINES_OUTPUT_DIRECTORY:
+ self._OutputDirectory = NormPath(Record[2], self._Macros)
+ if ' ' in self._OutputDirectory:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in OUTPUT_DIRECTORY",
+ File=self.MetaFile, Line=Record[-1],
+ ExtraData=self._OutputDirectory)
+ elif Name == TAB_DSC_DEFINES_FLASH_DEFINITION:
+ self._FlashDefinition = PathClass(NormPath(Record[2], self._Macros), GlobalData.gWorkspace)
+ ErrorCode, ErrorInfo = self._FlashDefinition.Validate('.fdf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=Record[-1],
+ ExtraData=ErrorInfo)
+ elif Name == TAB_DSC_PREBUILD:
+ PrebuildValue = Record[2]
+ if Record[2][0] == '"':
+ if Record[2][-1] != '"':
+ EdkLogger.error('build', FORMAT_INVALID, 'Missing double quotes at the end of the %s statement.' % TAB_DSC_PREBUILD,
+ File=self.MetaFile, Line=Record[-1])
+ PrebuildValue = Record[2][1:-1]
+ self._Prebuild = PrebuildValue
+ elif Name == TAB_DSC_POSTBUILD:
+ PostbuildValue = Record[2]
+ if Record[2][0] == '"':
+ if Record[2][-1] != '"':
+ EdkLogger.error('build', FORMAT_INVALID, 'Missing double quotes at the end of the %s statement.' % TAB_DSC_POSTBUILD,
+ File=self.MetaFile, Line=Record[-1])
+ PostbuildValue = Record[2][1:-1]
+ self._Postbuild = PostbuildValue
+ elif Name == TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES:
+ self._SupArchList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
+ elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
+ self._BuildTargets = GetSplitValueList(Record[2])
+ elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
+ if self._SkuName is None:
+ self._SkuName = Record[2]
+ if GlobalData.gSKUID_CMD:
+ self._SkuName = GlobalData.gSKUID_CMD
+ elif Name == TAB_DSC_DEFINES_PCD_INFO_GENERATION:
+ self._PcdInfoFlag = Record[2]
+ elif Name == TAB_DSC_DEFINES_PCD_VAR_CHECK_GENERATION:
+ self._VarCheckFlag = Record[2]
+ elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
+ try:
+ self._LoadFixAddress = int(Record[2], 0)
+ except:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (Record[2]))
+ elif Name == TAB_DSC_DEFINES_RFC_LANGUAGES:
+ if not Record[2] or Record[2][0] != '"' or Record[2][-1] != '"' or len(Record[2]) == 1:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for RFC_LANGUAGES must have double quotes around it, for example: RFC_LANGUAGES = "en-us;zh-hans"',
+ File=self.MetaFile, Line=Record[-1])
+ LanguageCodes = Record[2][1:-1]
+ if not LanguageCodes:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more RFC4646 format language codes must be provided for the RFC_LANGUAGES statement',
+ File=self.MetaFile, Line=Record[-1])
+ LanguageList = GetSplitValueList(LanguageCodes, TAB_SEMI_COLON_SPLIT)
+ # check whether there are empty entries in the list
+ if None in LanguageList:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more empty language codes are present in the RFC_LANGUAGES statement',
+ File=self.MetaFile, Line=Record[-1])
+ self._RFCLanguages = LanguageList
+ elif Name == TAB_DSC_DEFINES_ISO_LANGUAGES:
+ if not Record[2] or Record[2][0] != '"' or Record[2][-1] != '"' or len(Record[2]) == 1:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for ISO_LANGUAGES must have double quotes around it, for example: ISO_LANGUAGES = "engchn"',
+ File=self.MetaFile, Line=Record[-1])
+ LanguageCodes = Record[2][1:-1]
+ if not LanguageCodes:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more ISO639-2 format language codes must be provided for the ISO_LANGUAGES statement',
+ File=self.MetaFile, Line=Record[-1])
+ if len(LanguageCodes) % 3:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'bad ISO639-2 format for ISO_LANGUAGES',
+ File=self.MetaFile, Line=Record[-1])
+ LanguageList = []
+ for i in range(0, len(LanguageCodes), 3):
+ LanguageList.append(LanguageCodes[i:i + 3])
+ self._ISOLanguages = LanguageList
+ elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
+ #
+ # try to convert the GUID to a real UUID value to check whether the
+ # format of the VPD_TOOL_GUID value is correct
+ #
+ try:
+ uuid.UUID(Record[2])
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
+ self._VpdToolGuid = Record[2]
+ elif Name in self:
+ self[Name] = Record[2]
+ # set _Header to non-None in order to avoid database re-querying
+ self._Header = 'DUMMY'
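+
+ # Illustrative [Defines] fragment exercising the special handling above
+ # (platform name and values are hypothetical; the quoted language lists
+ # mirror the error-message examples):
+ #   [Defines]
+ #     OUTPUT_DIRECTORY            = Build/MyPlatform   # no spaces allowed
+ #     RFC_LANGUAGES               = "en-us;zh-hans"
+ #     ISO_LANGUAGES               = "engchn"
+ #     FIX_LOAD_TOP_MEMORY_ADDRESS = 0xFFFFFFFFFFFFFFFF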
+
+ ## Retrieve platform name
+ @property
+ def PlatformName(self):
+ if self._PlatformName is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._PlatformName is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
+ return self._PlatformName
+
+ @property
+ def Platform(self):
+ return self.PlatformName
+
+ ## Retrieve file guid
+ @property
+ def Guid(self):
+ if self._Guid is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._Guid is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_GUID", File=self.MetaFile)
+ return self._Guid
+
+ ## Retrieve platform version
+ @property
+ def Version(self):
+ if self._Version is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._Version is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_VERSION", File=self.MetaFile)
+ return self._Version
+
+ ## Retrieve platform description file version
+ @property
+ def DscSpecification(self):
+ if self._DscSpecification is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._DscSpecification is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No DSC_SPECIFICATION", File=self.MetaFile)
+ return self._DscSpecification
+
+ ## Retrieve OUTPUT_DIRECTORY
+ @property
+ def OutputDirectory(self):
+ if self._OutputDirectory is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._OutputDirectory is None:
+ self._OutputDirectory = os.path.join("Build", self._PlatformName)
+ return self._OutputDirectory
+
+ ## Retrieve SUPPORTED_ARCHITECTURES
+ @property
+ def SupArchList(self):
+ if self._SupArchList is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._SupArchList is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No SUPPORTED_ARCHITECTURES", File=self.MetaFile)
+ return self._SupArchList
+
+ ## Retrieve BUILD_TARGETS
+ @property
+ def BuildTargets(self):
+ if self._BuildTargets is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._BuildTargets is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BUILD_TARGETS", File=self.MetaFile)
+ return self._BuildTargets
+
+ @property
+ def PcdInfoFlag(self):
+ if self._PcdInfoFlag is None or self._PcdInfoFlag.upper() == 'FALSE':
+ return False
+ elif self._PcdInfoFlag.upper() == 'TRUE':
+ return True
+ else:
+ return False
+
+ @property
+ def VarCheckFlag(self):
+ if self._VarCheckFlag is None or self._VarCheckFlag.upper() == 'FALSE':
+ return False
+ elif self._VarCheckFlag.upper() == 'TRUE':
+ return True
+ else:
+ return False
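+
+ # Note: both flags above are tri-state strings from [Defines]; anything
+ # other than a case-insensitive "TRUE" (including absence) reads as False.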
+
+ ## Retrieve SKUID_IDENTIFIER
+ @property
+ def SkuName(self):
+ if self._SkuName is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._SkuName is None:
+ self._SkuName = TAB_DEFAULT
+ return self._SkuName
+
+ ## Override SKUID_IDENTIFIER
+ @SkuName.setter
+ def SkuName(self, Value):
+ self._SkuName = Value
+
+ @property
+ def FlashDefinition(self):
+ if self._FlashDefinition is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._FlashDefinition is None:
+ self._FlashDefinition = ''
+ return self._FlashDefinition
+
+ @property
+ def Prebuild(self):
+ if self._Prebuild is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._Prebuild is None:
+ self._Prebuild = ''
+ return self._Prebuild
+
+ @property
+ def Postbuild(self):
+ if self._Postbuild is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._Postbuild is None:
+ self._Postbuild = ''
+ return self._Postbuild
+
+ ## Retrieve BUILD_NUMBER
+ @property
+ def BuildNumber(self):
+ if self._BuildNumber is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._BuildNumber is None:
+ self._BuildNumber = ''
+ return self._BuildNumber
+
+ ## Retrieve MAKEFILE_NAME
+ @property
+ def MakefileName(self):
+ if self._MakefileName is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._MakefileName is None:
+ self._MakefileName = ''
+ return self._MakefileName
+
+ ## Retrieve BsBaseAddress
+ @property
+ def BsBaseAddress(self):
+ if self._BsBaseAddress is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._BsBaseAddress is None:
+ self._BsBaseAddress = ''
+ return self._BsBaseAddress
+
+ ## Retrieve RtBaseAddress
+ @property
+ def RtBaseAddress(self):
+ if self._RtBaseAddress is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._RtBaseAddress is None:
+ self._RtBaseAddress = ''
+ return self._RtBaseAddress
+
+ ## Retrieve the top address for the load fix address
+ @property
+ def LoadFixAddress(self):
+ if self._LoadFixAddress is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+
+ if self._LoadFixAddress is None:
+ self._LoadFixAddress = self._Macros.get(TAB_FIX_LOAD_TOP_MEMORY_ADDRESS, '0')
+
+ try:
+ self._LoadFixAddress = int(self._LoadFixAddress, 0)
+ except:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (self._LoadFixAddress))
+
+ #
+ # If defined on the command line, it should override the value in the DSC file.
+ #
+ if 'FIX_LOAD_TOP_MEMORY_ADDRESS' in GlobalData.gCommandLineDefines:
+ try:
+ self._LoadFixAddress = int(GlobalData.gCommandLineDefines['FIX_LOAD_TOP_MEMORY_ADDRESS'], 0)
+ except:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (GlobalData.gCommandLineDefines['FIX_LOAD_TOP_MEMORY_ADDRESS']))
+
+ if self._LoadFixAddress < 0:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is set to the invalid negative value 0x%x" % (self._LoadFixAddress))
+ if self._LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self._LoadFixAddress % 0x1000 != 0:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is set to the invalid unaligned 4K value 0x%x" % (self._LoadFixAddress))
+
+ return self._LoadFixAddress
+
+ ## Retrieve RFCLanguage filter
+ @property
+ def RFCLanguages(self):
+ if self._RFCLanguages is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._RFCLanguages is None:
+ self._RFCLanguages = []
+ return self._RFCLanguages
+
+ ## Retrieve ISOLanguage filter
+ @property
+ def ISOLanguages(self):
+ if self._ISOLanguages is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._ISOLanguages is None:
+ self._ISOLanguages = []
+ return self._ISOLanguages
+
+ ## Retrieve the GUID string for VPD tool
+ @property
+ def VpdToolGuid(self):
+ if self._VpdToolGuid is None:
+ if self._Header is None:
+ self._GetHeaderInfo()
+ if self._VpdToolGuid is None:
+ self._VpdToolGuid = ''
+ return self._VpdToolGuid
+
+ ## Retrieve [SkuIds] section information
+ @property
+ def SkuIds(self):
+ if self._SkuIds is None:
+ self._SkuIds = OrderedDict()
+ RecordList = self._RawData[MODEL_EFI_SKU_ID, self._Arch]
+ for Record in RecordList:
+ if not Record[0]:
+ EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID number',
+ File=self.MetaFile, Line=Record[-1])
+ if not Record[1]:
+ EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
+ File=self.MetaFile, Line=Record[-1])
+ if not Pattern.match(Record[0]) and not HexPattern.match(Record[0]):
+ EdkLogger.error('build', FORMAT_INVALID, "The format of the Sku ID number is invalid. It only support Integer and HexNumber",
+ File=self.MetaFile, Line=Record[-1])
+ if not SkuIdPattern.match(Record[1]) or (Record[2] and not SkuIdPattern.match(Record[2])):
+ EdkLogger.error('build', FORMAT_INVALID, "The format of the Sku ID name is invalid. The correct format is '(a-zA-Z_)(a-zA-Z0-9_)*'",
+ File=self.MetaFile, Line=Record[-1])
+ self._SkuIds[Record[1].upper()] = (str(DscBuildData.ToInt(Record[0])), Record[1].upper(), Record[2].upper())
+ if TAB_DEFAULT not in self._SkuIds:
+ self._SkuIds[TAB_DEFAULT] = ("0", TAB_DEFAULT, TAB_DEFAULT)
+ if TAB_COMMON not in self._SkuIds:
+ self._SkuIds[TAB_COMMON] = ("0", TAB_DEFAULT, TAB_DEFAULT)
+ return self._SkuIds
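+
+ # Illustrative [SkuIds] entries accepted above (names are hypothetical);
+ # the optional third field names a parent SKU:
+ #   [SkuIds]
+ #     0|DEFAULT
+ #     0x1|SkuA|DEFAULT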
+
+ @staticmethod
+ def ToInt(intstr):
+ return int(intstr, 16) if intstr.upper().startswith("0X") else int(intstr)
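+
+ # Illustrative behavior:
+ #   DscBuildData.ToInt("0x1F")  # -> 31
+ #   DscBuildData.ToInt("10")    # -> 10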
+
+ def _GetDefaultStores(self):
+ if self.DefaultStores is None:
+ self.DefaultStores = OrderedDict()
+ RecordList = self._RawData[MODEL_EFI_DEFAULT_STORES, self._Arch]
+ for Record in RecordList:
+ if not Record[0]:
+ EdkLogger.error('build', FORMAT_INVALID, 'No DefaultStores ID number',
+ File=self.MetaFile, Line=Record[-1])
+ if not Record[1]:
+ EdkLogger.error('build', FORMAT_INVALID, 'No DefaultStores ID name',
+ File=self.MetaFile, Line=Record[-1])
+ if not Pattern.match(Record[0]) and not HexPattern.match(Record[0]):
+ EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID number is invalid. It only support Integer and HexNumber",
+ File=self.MetaFile, Line=Record[-1])
+ if not IsValidWord(Record[1]):
+ EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID name is invalid. The correct format is '(a-zA-Z0-9_)(a-zA-Z0-9_-.)*'",
+ File=self.MetaFile, Line=Record[-1])
+ self.DefaultStores[Record[1].upper()] = (DscBuildData.ToInt(Record[0]), Record[1].upper())
+ if TAB_DEFAULT_STORES_DEFAULT not in self.DefaultStores:
+ self.DefaultStores[TAB_DEFAULT_STORES_DEFAULT] = (0, TAB_DEFAULT_STORES_DEFAULT)
+ GlobalData.gDefaultStores = sorted(self.DefaultStores.keys())
+ return self.DefaultStores
+
+ def OverrideDuplicateModule(self):
+ RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
+ Macros = self._Macros
+ Components = {}
+ for Record in RecordList:
+ ModuleId = Record[6]
+ file_guid = self._RawData[MODEL_META_DATA_HEADER, self._Arch, None, ModuleId]
+ file_guid_str = file_guid[0][2] if file_guid else "NULL"
+ ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ if self._Arch != TAB_ARCH_COMMON and (file_guid_str,str(ModuleFile)) in Components:
+ self._RawData.DisableOverrideComponent(Components[(file_guid_str,str(ModuleFile))])
+ Components[(file_guid_str,str(ModuleFile))] = ModuleId
+ self._RawData._PostProcessed = False
+
+ ## Retrieve packages this Platform depends on
+ @cached_property
+ def Packages(self):
+ RetVal = set()
+ RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch]
+ Macros = self._Macros
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ # validate the file
+ ErrorCode, ErrorInfo = File.Validate('.dec')
+ if ErrorCode != 0:
+ LineNo = Record[-1]
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ # parse this package now. we need it to get protocol/ppi/guid value
+ RetVal.add(self._Bdb[File, self._Arch, self._Target, self._Toolchain])
+ return RetVal
+
+ ## Retrieve [Components] section information
+ @property
+ def Modules(self):
+ if self._Modules is not None:
+ return self._Modules
+ self.OverrideDuplicateModule()
+ self._Modules = OrderedDict()
+ RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
+ Macros = self._Macros
+ for Record in RecordList:
+ ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ ModuleId = Record[6]
+ LineNo = Record[7]
+
+ # validate the file
+ ErrorCode, ErrorInfo = ModuleFile.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+
+ ModuleBuildData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
+ Module = ModuleBuildClassObject()
+ Module.MetaFile = ModuleFile
+ Module.Guid = ModuleBuildData.Guid
+ # get module private library instance
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId]
+ for Record in RecordList:
+ LibraryClass = Record[0]
+ LibraryPath = PathClass(NormPath(Record[1], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ LineNo = Record[-1]
+
+ # validate the file
+ ErrorCode, ErrorInfo = LibraryPath.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+
+ if LibraryClass == '' or LibraryClass == 'NULL':
+ self._NullLibraryNumber += 1
+ LibraryClass = 'NULL%d' % self._NullLibraryNumber
+ EdkLogger.verbose("Found forced library for %s\n\t%s [%s]" % (ModuleFile, LibraryPath, LibraryClass))
+ Module.LibraryClasses[LibraryClass] = LibraryPath
+ if LibraryPath not in self.LibraryInstances:
+ self.LibraryInstances.append(LibraryPath)
+ S_PcdSet = []
+ # get module private PCD setting
+ for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \
+ MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
+ RecordList = self._RawData[Type, self._Arch, None, ModuleId]
+ for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
+ TokenList = GetSplitValueList(Setting)
+ DefaultValue = TokenList[0]
+ # the format is PcdName | Value | VOID* | MaxDatumSize
+ if len(TokenList) > 2:
+ MaxDatumSize = TokenList[2]
+ else:
+ MaxDatumSize = ''
+ TypeString = self._PCD_TYPE_STRING_[Type]
+
+ TCName,PCName,DimensionAttr,Field = self.ParsePcdNameStruct(TokenSpaceGuid, PcdCName)
+
+ if ("." in TokenSpaceGuid or "[" in PcdCName):
+ S_PcdSet.append([ TCName,PCName,DimensionAttr,Field, ModuleBuildData.Guid, "", Dummy5, AnalyzePcdExpression(Setting)[0]])
+ DefaultValue = ''
+ if ( PCName,TCName) not in Module.Pcds:
+ Pcd = PcdClassObject(
+ PCName,
+ TCName,
+ TypeString,
+ '',
+ DefaultValue,
+ '',
+ MaxDatumSize,
+ {},
+ False,
+ None,
+ IsDsc=True)
+ Module.Pcds[PCName, TCName] = Pcd
+
+ Module.StrPcdSet = S_PcdSet
+ for TCName,PCName, _,_,_,_,_,_ in S_PcdSet:
+ if (PCName,TCName) in Module.Pcds:
+ Module.StrPcdOverallValue[(PCName,TCName)] = Module.Pcds[(PCName,TCName)].DefaultValue, self.MetaFile,Dummy5
+ # get module private build options
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
+ if (ToolChainFamily, ToolChain) not in Module.BuildOptions:
+ Module.BuildOptions[ToolChainFamily, ToolChain] = Option
+ else:
+ OptionString = Module.BuildOptions[ToolChainFamily, ToolChain]
+ Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
+
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, None, ModuleId]
+ if RecordList:
+ if len(RecordList) != 1:
+ EdkLogger.error('build', OPTION_UNKNOWN, 'Only FILE_GUID can be listed in <Defines> section.',
+ File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
+ ModuleFile = ProcessDuplicatedInf(ModuleFile, RecordList[0][2], GlobalData.gWorkspace)
+ ModuleFile.Arch = self._Arch
+ Module.Guid = RecordList[0][2]
+ for item in Module.StrPcdSet:
+ item[4] = RecordList[0][2]
+ self._Modules[ModuleFile] = Module
+ return self._Modules
+
+ ## Retrieve all possible library instances used in this platform
+ @property
+ def LibraryInstances(self):
+ if self._LibraryInstances is None:
+ self.LibraryClasses
+ return self._LibraryInstances
+
+ ## Retrieve [LibraryClasses] information
+ @property
+ def LibraryClasses(self):
+ if self._LibraryClasses is None:
+ self._LibraryInstances = []
+ #
+ # tdict is a special kind of dict, used for selecting the correct
+ # library instance for a given library class and module type
+ #
+ LibraryClassDict = tdict(True, 3)
+ # track all library class names
+ LibraryClassSet = set()
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, -1]
+ Macros = self._Macros
+ for Record in RecordList:
+ LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy, Dummy, LineNo = Record
+ if LibraryClass == '' or LibraryClass == 'NULL':
+ self._NullLibraryNumber += 1
+ LibraryClass = 'NULL%d' % self._NullLibraryNumber
+ EdkLogger.verbose("Found forced library for arch=%s\n\t%s [%s]" % (Arch, LibraryInstance, LibraryClass))
+ LibraryClassSet.add(LibraryClass)
+ LibraryInstance = PathClass(NormPath(LibraryInstance, Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ # validate the file
+ ErrorCode, ErrorInfo = LibraryInstance.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+
+ if ModuleType != TAB_COMMON and ModuleType not in SUP_MODULE_LIST:
+ EdkLogger.error('build', OPTION_UNKNOWN, "Unknown module type [%s]" % ModuleType,
+ File=self.MetaFile, ExtraData=LibraryInstance, Line=LineNo)
+ LibraryClassDict[Arch, ModuleType, LibraryClass] = LibraryInstance
+ if LibraryInstance not in self._LibraryInstances:
+ self._LibraryInstances.append(LibraryInstance)
+
+ # resolve the specific library instance for each class and each module type
+ self._LibraryClasses = tdict(True)
+ for LibraryClass in LibraryClassSet:
+ # try all possible module types
+ for ModuleType in SUP_MODULE_LIST:
+ LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass]
+ if LibraryInstance is None:
+ continue
+ self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance
+
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch]
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ LineNo = Record[-1]
+ # validate the file
+ ErrorCode, ErrorInfo = File.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+ if File not in self._LibraryInstances:
+ self._LibraryInstances.append(File)
+ #
+ # we need the module name as the library class name, so we have
+ # to parse it here. (self._Bdb[] will trigger a file parse if it
+ # hasn't been parsed)
+ #
+ Library = self._Bdb[File, self._Arch, self._Target, self._Toolchain]
+ self._LibraryClasses[Library.BaseName, ':dummy:'] = Library
+ return self._LibraryClasses
+
+ def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
+ if not self._DecPcds:
+
+ FdfInfList = []
+ if GlobalData.gFdfParser:
+ FdfInfList = GlobalData.gFdfParser.Profile.InfList
+
+ PkgSet = set()
+ for Inf in FdfInfList:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch)
+ if ModuleFile in self._Modules:
+ continue
+ ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
+ PkgSet.update(ModuleData.Packages)
+ if self.Packages:
+ PkgSet.update(self.Packages)
+ self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain, PkgSet)
+ self._GuidDict.update(GlobalData.gPlatformPcds)
+
+ if (PcdCName, TokenSpaceGuid) not in self._DecPcds:
+ EdkLogger.error('build', PARSER_ERROR,
+ "Pcd (%s.%s) defined in DSC is not declared in DEC files referenced in INF files in FDF. Arch: ['%s']" % (TokenSpaceGuid, PcdCName, self._Arch),
+ File=self.MetaFile, Line=LineNo)
+ ValueList, IsValid, Index = AnalyzeDscPcd(Setting, PcdType, self._DecPcds[PcdCName, TokenSpaceGuid].DatumType)
+ if not IsValid:
+ if PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self.MetaFile, Line=LineNo,
+ ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
+ else:
+ if ValueList[2] == '-1':
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self.MetaFile, Line=LineNo,
+ ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
+ if ValueList[Index]:
+ DatumType = self._DecPcds[PcdCName, TokenSpaceGuid].DatumType
+ if "{CODE(" not in ValueList[Index]:
+ try:
+ ValueList[Index] = ValueExpressionEx(ValueList[Index], DatumType, self._GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, Value, File=self.MetaFile, Line=LineNo,
+ ExtraData="PCD [%s.%s] Value \"%s\" " % (
+ TokenSpaceGuid, PcdCName, ValueList[Index]))
+ except EvaluationException as Excpt:
+ if hasattr(Excpt, 'Pcd'):
+ if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
+ " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
+ " of the DSC file" % Excpt.Pcd,
+ File=self.MetaFile, Line=LineNo)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
+ File=self.MetaFile, Line=LineNo)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
+ File=self.MetaFile, Line=LineNo)
+
+ if ValueList[Index]:
+ Valid, ErrStr = CheckPcdDatum(self._DecPcds[PcdCName, TokenSpaceGuid].DatumType, ValueList[Index])
+ if not Valid:
+ EdkLogger.error('build', FORMAT_INVALID, ErrStr, File=self.MetaFile, Line=LineNo,
+ ExtraData="%s.%s" % (TokenSpaceGuid, PcdCName))
+ if PcdType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE):
+ if self._DecPcds[PcdCName, TokenSpaceGuid].DatumType.strip() != ValueList[1].strip():
+ DecPcd = self._DecPcds[PcdCName, TokenSpaceGuid]
+ EdkLogger.error('build', FORMAT_INVALID,
+ "Pcd datumtype used in DSC file is not the same as its declaration. DatumType:%s"%DecPcd.DatumType,
+ File=self.MetaFile, Line=LineNo,
+ ExtraData="Dsc:%s.%s|%s\n Dec:%s.%s|%s|%s|%s" % (TokenSpaceGuid, PcdCName, Setting, TokenSpaceGuid, \
+ PcdCName, DecPcd.DefaultValue, DecPcd.DatumType, DecPcd.TokenValue))
+ if (TokenSpaceGuid + '.' + PcdCName) in GlobalData.gPlatformPcds:
+ if GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] != ValueList[Index]:
+ GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] = ValueList[Index]
+ return ValueList
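+
+ # Illustrative DSC PCD setting validated above (names and values are
+ # hypothetical), following the PcdName | Value | VOID* | MaxDatumSize layout:
+ #   gTokenSpaceGuid.MyStringPcd|L"abc"|VOID*|0x10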
+
+ def _FilterPcdBySkuUsage(self, Pcds):
+ available_sku = self.SkuIdMgr.AvailableSkuIdSet
+ sku_usage = self.SkuIdMgr.SkuUsageType
+ if sku_usage == SkuClass.SINGLE:
+ for pcdname in Pcds:
+ pcd = Pcds[pcdname]
+ Pcds[pcdname].SkuInfoList = {TAB_DEFAULT:pcd.SkuInfoList[skuid] for skuid in pcd.SkuInfoList if skuid in available_sku}
+ if isinstance(pcd, StructurePcd) and pcd.SkuOverrideValues:
+ Pcds[pcdname].SkuOverrideValues = {TAB_DEFAULT:pcd.SkuOverrideValues[skuid] for skuid in pcd.SkuOverrideValues if skuid in available_sku}
+ else:
+ for pcdname in Pcds:
+ pcd = Pcds[pcdname]
+ Pcds[pcdname].SkuInfoList = {skuid:pcd.SkuInfoList[skuid] for skuid in pcd.SkuInfoList if skuid in available_sku}
+ if isinstance(pcd, StructurePcd) and pcd.SkuOverrideValues:
+ Pcds[pcdname].SkuOverrideValues = {skuid:pcd.SkuOverrideValues[skuid] for skuid in pcd.SkuOverrideValues if skuid in available_sku}
+ return Pcds
+
+ def CompleteHiiPcdsDefaultStores(self, Pcds):
+ HiiPcd = [Pcds[pcd] for pcd in Pcds if Pcds[pcd].Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]]
+ DefaultStoreMgr = DefaultStore(self.DefaultStores)
+ for pcd in HiiPcd:
+ for skuid in pcd.SkuInfoList:
+ skuobj = pcd.SkuInfoList.get(skuid)
+ if TAB_DEFAULT_STORES_DEFAULT not in skuobj.DefaultStoreDict:
+ PcdDefaultStoreSet = set(defaultstorename for defaultstorename in skuobj.DefaultStoreDict)
+ mindefaultstorename = DefaultStoreMgr.GetMin(PcdDefaultStoreSet)
+ skuobj.DefaultStoreDict[TAB_DEFAULT_STORES_DEFAULT] = skuobj.DefaultStoreDict[mindefaultstorename]
+ return Pcds
+
+ def RecoverCommandLinePcd(self):
+ def UpdateCommandLineValue(pcd):
+ if pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
+ pcd.PcdValueFromComm = pcd.DefaultValue
+ elif pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ pcd.PcdValueFromComm = pcd.SkuInfoList.get(TAB_DEFAULT).HiiDefaultValue
+ else:
+ pcd.PcdValueFromComm = pcd.SkuInfoList.get(TAB_DEFAULT).DefaultValue
+ for pcd in self._Pcds:
+ if isinstance(self._Pcds[pcd], StructurePcd) and (self._Pcds[pcd].PcdValueFromComm or self._Pcds[pcd].PcdFieldValueFromComm):
+ UpdateCommandLineValue(self._Pcds[pcd])
+
+ def __ParsePcdFromCommandLine(self):
+ if GlobalData.BuildOptionPcd:
+ for i, pcd in enumerate(GlobalData.BuildOptionPcd):
+ if isinstance(pcd, tuple):
+ continue
+ (pcdname, pcdvalue) = pcd.split('=')
+ if not pcdvalue:
+ EdkLogger.error('build', AUTOGEN_ERROR, "No Value specified for the PCD %s." % (pcdname))
+ if '.' in pcdname:
+ (Name1, Name2) = pcdname.split('.', 1)
+ if "." in Name2:
+ (Name3, FieldName) = Name2.split(".", 1)
+ if ((Name3, Name1)) in self.DecPcds:
+ HasTokenSpace = True
+ TokenCName = Name3
+ TokenSpaceGuidCName = Name1
+ else:
+ FieldName = Name2
+ TokenCName = Name1
+ TokenSpaceGuidCName = ''
+ HasTokenSpace = False
+ else:
+ if ((Name2, Name1)) in self.DecPcds:
+ HasTokenSpace = True
+ TokenCName = Name2
+ TokenSpaceGuidCName = Name1
+ FieldName =""
+ else:
+ FieldName = Name2
+ TokenCName = Name1
+ TokenSpaceGuidCName = ''
+ HasTokenSpace = False
+ else:
+ FieldName = ""
+ TokenCName = pcdname
+ TokenSpaceGuidCName = ''
+ HasTokenSpace = False
+ TokenSpaceGuidCNameList = []
+ FoundFlag = False
+ PcdDatumType = ''
+ DisplayName = TokenCName
+ if FieldName:
+ DisplayName = TokenCName + '.' + FieldName
+ if not HasTokenSpace:
+ for key in self.DecPcds:
+ PcdItem = self.DecPcds[key]
+ if TokenCName == PcdItem.TokenCName:
+ if PcdItem.TokenSpaceGuidCName not in TokenSpaceGuidCNameList:
+ if len(TokenSpaceGuidCNameList) < 1:
+ TokenSpaceGuidCNameList.append(PcdItem.TokenSpaceGuidCName)
+ TokenSpaceGuidCName = PcdItem.TokenSpaceGuidCName
+ PcdDatumType = PcdItem.DatumType
+ FoundFlag = True
+ else:
+ EdkLogger.error(
+ 'build',
+ AUTOGEN_ERROR,
+ "The Pcd %s is found under multiple different TokenSpaceGuid: %s and %s." % (DisplayName, PcdItem.TokenSpaceGuidCName, TokenSpaceGuidCNameList[0])
+ )
+ else:
+ if (TokenCName, TokenSpaceGuidCName) in self.DecPcds:
+ PcdDatumType = self.DecPcds[(TokenCName, TokenSpaceGuidCName)].DatumType
+ FoundFlag = True
+ if not FoundFlag:
+ if HasTokenSpace:
+ EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s.%s is not found in the DEC file." % (TokenSpaceGuidCName, DisplayName))
+ else:
+ EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s is not found in the DEC file." % (DisplayName))
+ pcdvalue = pcdvalue.replace("\\\\\\'", '\\\\\\"').replace('\\\'', '\'').replace('\\\\\\"', "\\'")
+ if FieldName:
+ pcdvalue = DscBuildData.HandleFlexiblePcd(TokenSpaceGuidCName, TokenCName, pcdvalue, PcdDatumType, self._GuidDict, FieldName)
+ else:
+ pcdvalue = DscBuildData.HandleFlexiblePcd(TokenSpaceGuidCName, TokenCName, pcdvalue, PcdDatumType, self._GuidDict)
+ IsValid, Cause = CheckPcdDatum(PcdDatumType, pcdvalue)
+ if not IsValid:
+ EdkLogger.error("build", FORMAT_INVALID, Cause, ExtraData="%s.%s" % (TokenSpaceGuidCName, TokenCName))
+ GlobalData.BuildOptionPcd[i] = (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, ("build command options", 1))
+
+ if GlobalData.BuildOptionPcd:
+ inf_objs = [item for item in self._Bdb._CACHE_.values() if item.Arch == self.Arch and item.MetaFile.Ext.lower() == '.inf']
+ for pcd in GlobalData.BuildOptionPcd:
+ (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
+ for BuildData in inf_objs:
+ for key in BuildData.Pcds:
+ PcdItem = BuildData.Pcds[key]
+ if (TokenSpaceGuidCName, TokenCName) == (PcdItem.TokenSpaceGuidCName, PcdItem.TokenCName) and FieldName =="":
+ PcdItem.DefaultValue = pcdvalue
+ PcdItem.PcdValueFromComm = pcdvalue
+ # On the command line, a later full-value assignment should override any earlier field assignment of the same PCD.
+ # For example: --pcd Token.pcd.field="" --pcd Token.pcd=H"{}"
+ delete_assign = []
+ field_assign = {}
+ if GlobalData.BuildOptionPcd:
+ for pcdTuple in GlobalData.BuildOptionPcd:
+ TokenSpaceGuid, Token, Field = pcdTuple[0], pcdTuple[1], pcdTuple[2]
+ if Field:
+ if (TokenSpaceGuid, Token) not in field_assign:
+ field_assign[TokenSpaceGuid, Token] = []
+ field_assign[TokenSpaceGuid, Token].append(pcdTuple)
+ else:
+ if (TokenSpaceGuid, Token) in field_assign:
+ delete_assign.extend(field_assign[TokenSpaceGuid, Token])
+ field_assign[TokenSpaceGuid, Token] = []
+ for item in delete_assign:
+ GlobalData.BuildOptionPcd.remove(item)
+
+ @staticmethod
+ def HandleFlexiblePcd(TokenSpaceGuidCName, TokenCName, PcdValue, PcdDatumType, GuidDict, FieldName=''):
+ if FieldName:
+ IsArray = False
+ TokenCName += '.' + FieldName
+ if PcdValue.startswith('H'):
+ if FieldName and _IsFieldValueAnArray(PcdValue[1:]):
+ PcdDatumType = TAB_VOID
+ IsArray = True
+ if FieldName and not IsArray:
+ return PcdValue
+ try:
+ PcdValue = ValueExpressionEx(PcdValue[1:], PcdDatumType, GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
+ (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
+ elif PcdValue.startswith("L'") or PcdValue.startswith("'"):
+ if FieldName and _IsFieldValueAnArray(PcdValue):
+ PcdDatumType = TAB_VOID
+ IsArray = True
+ if FieldName and not IsArray:
+ return PcdValue
+ try:
+ PcdValue = ValueExpressionEx(PcdValue, PcdDatumType, GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
+ (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
+ elif PcdValue.startswith('L'):
+ PcdValue = 'L"' + PcdValue[1:] + '"'
+ if FieldName and _IsFieldValueAnArray(PcdValue):
+ PcdDatumType = TAB_VOID
+ IsArray = True
+ if FieldName and not IsArray:
+ return PcdValue
+ try:
+ PcdValue = ValueExpressionEx(PcdValue, PcdDatumType, GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
+ (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
+ else:
+ if PcdValue.upper() == 'FALSE':
+ PcdValue = str(0)
+ if PcdValue.upper() == 'TRUE':
+ PcdValue = str(1)
+ if not FieldName:
+ if PcdDatumType not in TAB_PCD_NUMERIC_TYPES:
+ PcdValue = '"' + PcdValue + '"'
+ elif not PcdValue.isdigit() and not PcdValue.upper().startswith('0X'):
+ PcdValue = '"' + PcdValue + '"'
+ else:
+ IsArray = False
+ Base = 10
+ if PcdValue.upper().startswith('0X'):
+ Base = 16
+ try:
+ Num = int(PcdValue, Base)
+ except:
+ PcdValue = '"' + PcdValue + '"'
+ if _IsFieldValueAnArray(PcdValue):
+ PcdDatumType = TAB_VOID
+ IsArray = True
+ if not IsArray:
+ return PcdValue
+ try:
+ PcdValue = ValueExpressionEx(PcdValue, PcdDatumType, GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
+ (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
+ return PcdValue
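+
+ # Illustrative command-line value forms handled above (PCD names are
+ # hypothetical):
+ #   --pcd gGuid.MyPcd=H"{0x1, 0x2}"   # byte-array/expression form ('H' prefix)
+ #   --pcd gGuid.MyPcd=L"abc"          # wide string ('L' prefix gets quoted)
+ #   --pcd gGuid.MyPcd=TRUE            # boolean, normalized to "1"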
+
+ ## Retrieve all PCD settings in platform
+ @property
+ def Pcds(self):
+ if self._Pcds is None:
+ self._Pcds = OrderedDict()
+ self.__ParsePcdFromCommandLine()
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
+ self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_DEFAULT))
+ self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_HII))
+ self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_VPD))
+ self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_EX_DEFAULT))
+ self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_EX_HII))
+ self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_EX_VPD))
+
+ self._Pcds = self.CompletePcdValues(self._Pcds)
+ self._Pcds = self.OverrideByFdfOverAll(self._Pcds)
+ self._Pcds = self.OverrideByCommOverAll(self._Pcds)
+ self._Pcds = self.UpdateStructuredPcds(MODEL_PCD_TYPE_LIST, self._Pcds)
+ self._Pcds = self.CompleteHiiPcdsDefaultStores(self._Pcds)
+ self._Pcds = self._FilterPcdBySkuUsage(self._Pcds)
+
+ self.RecoverCommandLinePcd()
+ return self._Pcds
+
+ ## Retrieve [BuildOptions]
+ @property
+ def BuildOptions(self):
+ if self._BuildOptions is None:
+ self._BuildOptions = OrderedDict()
+ #
+ # Retrieve build options for EDKII and EDK style modules
+ #
+ for CodeBase in (EDKII_NAME, EDK_NAME):
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, CodeBase]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
+ if Dummy3.upper() != TAB_COMMON:
+ continue
+ CurKey = (ToolChainFamily, ToolChain, CodeBase)
+ #
+ # Only flags can be appended
+ #
+ if CurKey not in self._BuildOptions or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
+ self._BuildOptions[CurKey] = Option
+ else:
+ if ' ' + Option not in self._BuildOptions[CurKey]:
+ self._BuildOptions[CurKey] += ' ' + Option
+ return self._BuildOptions
+
+ def GetBuildOptionsByPkg(self, Module, ModuleType):
+ local_pkg = os.path.split(Module.LocalPkg())[0]
+ if self._ModuleTypeOptions is None:
+ self._ModuleTypeOptions = OrderedDict()
+ if ModuleType not in self._ModuleTypeOptions:
+ options = OrderedDict()
+ self._ModuleTypeOptions[ModuleType] = options
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
+ if Dummy2 not in (TAB_COMMON,local_pkg.upper(),"EDKII"):
+ continue
+ Type = Dummy3
+ if Type.upper() == ModuleType.upper():
+ Key = (ToolChainFamily, ToolChain)
+ if Key not in options or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
+ options[Key] = Option
+ else:
+ if ' ' + Option not in options[Key]:
+ options[Key] += ' ' + Option
+ return self._ModuleTypeOptions[ModuleType]
+
+ def GetBuildOptionsByModuleType(self, Edk, ModuleType):
+ if self._ModuleTypeOptions is None:
+ self._ModuleTypeOptions = OrderedDict()
+ if (Edk, ModuleType) not in self._ModuleTypeOptions:
+ options = OrderedDict()
+ self._ModuleTypeOptions[Edk, ModuleType] = options
+ DriverType = '%s.%s' % (Edk, ModuleType)
+ CommonDriverType = '%s.%s' % (TAB_COMMON, ModuleType)
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
+ Type = Dummy2 + '.' + Dummy3
+ if Type.upper() == DriverType.upper() or Type.upper() == CommonDriverType.upper():
+ Key = (ToolChainFamily, ToolChain, Edk)
+ if Key not in options or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
+ options[Key] = Option
+ else:
+ if ' ' + Option not in options[Key]:
+ options[Key] += ' ' + Option
+ return self._ModuleTypeOptions[Edk, ModuleType]
+
+ @staticmethod
+ def GetStructurePcdInfo(PcdSet):
+ structure_pcd_data = defaultdict(list)
+ for item in PcdSet:
+ structure_pcd_data[(item[0], item[1])].append(item)
+
+ return structure_pcd_data
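+
+ # Illustrative grouping (hypothetical records): entries sharing the same
+ # (TokenSpaceGuid, PcdCName) head are collected under one key, so all
+ # per-field overrides of a structure PCD land in a single list:
+ #   [[gGuid, Pcd, ..., Field1, ...], [gGuid, Pcd, ..., Field2, ...]]
+ #     -> {(gGuid, Pcd): [<both records>]}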
+
+ @staticmethod
+ def OverrideByFdf(StruPcds,workspace):
+ if GlobalData.gFdfParser is None:
+ return StruPcds
+ StructurePcdInFdf = OrderedDict()
+ fdfpcd = GlobalData.gFdfParser.Profile.PcdDict
+ fdfpcdlocation = GlobalData.gFdfParser.Profile.PcdLocalDict
+ for item in fdfpcd :
+ if len(item[2]) and (item[0],item[1]) in StruPcds:
+ StructurePcdInFdf[(item[1],item[0],item[2] )] = fdfpcd[item]
+ GlobalPcds = {(item[0],item[1]) for item in StructurePcdInFdf}
+ for Pcd in StruPcds.values():
+ if (Pcd.TokenSpaceGuidCName,Pcd.TokenCName) not in GlobalPcds:
+ continue
+ FieldValues = OrderedDict()
+ for item in StructurePcdInFdf:
+ if (Pcd.TokenSpaceGuidCName,Pcd.TokenCName) == (item[0],item[1]) and item[2]:
+ FieldValues[item[2]] = StructurePcdInFdf[item]
+ for field in FieldValues:
+ if field not in Pcd.PcdFieldValueFromFdf:
+ Pcd.PcdFieldValueFromFdf[field] = ["","",""]
+ Pcd.PcdFieldValueFromFdf[field][0] = FieldValues[field]
+ Pcd.PcdFieldValueFromFdf[field][1] = os.path.relpath(fdfpcdlocation[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName,field)][0],workspace)
+ Pcd.PcdFieldValueFromFdf[field][2] = fdfpcdlocation[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName,field)][1]
+
+ return StruPcds
+
+ @staticmethod
+ def OverrideByComm(StruPcds):
+ StructurePcdInCom = OrderedDict()
+ for item in GlobalData.BuildOptionPcd:
+ if len(item) == 5 and (item[1], item[0]) in StruPcds:
+ StructurePcdInCom[(item[0], item[1], item[2] )] = (item[3], item[4])
+ GlobalPcds = {(item[0], item[1]) for item in StructurePcdInCom}
+ for Pcd in StruPcds.values():
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) not in GlobalPcds:
+ continue
+ FieldValues = OrderedDict()
+ for item in StructurePcdInCom:
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) == (item[0], item[1]) and item[2]:
+ FieldValues[item[2]] = StructurePcdInCom[item]
+ for field in FieldValues:
+ if field not in Pcd.PcdFieldValueFromComm:
+ Pcd.PcdFieldValueFromComm[field] = ["", "", ""]
+ Pcd.PcdFieldValueFromComm[field][0] = FieldValues[field][0]
+ Pcd.PcdFieldValueFromComm[field][1] = FieldValues[field][1][0]
+ Pcd.PcdFieldValueFromComm[field][2] = FieldValues[field][1][1]
+ return StruPcds
+
+ def OverrideByCommOverAll(self,AllPcds):
+ def CheckStructureInComm(commpcds):
+ if not commpcds:
+ return False
+ if len(commpcds[0]) == 5:
+ return True
+ return False
+ NoFiledValues = OrderedDict()
+ if CheckStructureInComm(GlobalData.BuildOptionPcd):
+ StructurePcdInCom = OrderedDict()
+ for item in GlobalData.BuildOptionPcd:
+ StructurePcdInCom[(item[0], item[1], item[2] )] = (item[3], item[4])
+ for item in StructurePcdInCom:
+ if not item[2]:
+ NoFiledValues[(item[0], item[1])] = StructurePcdInCom[item]
+ else:
+ for item in GlobalData.BuildOptionPcd:
+ NoFiledValues[(item[0], item[1])] = [item[2]]
+ for Guid, Name in NoFiledValues:
+ if (Name, Guid) in AllPcds:
+ Pcd = AllPcds.get((Name, Guid))
+ if isinstance(self._DecPcds.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName), None), StructurePcd):
+ self._DecPcds.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName)).PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
+ else:
+ Pcd.PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
+ Pcd.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
+ for sku in Pcd.SkuInfoList:
+ SkuInfo = Pcd.SkuInfoList[sku]
+ if SkuInfo.DefaultValue:
+ SkuInfo.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
+ else:
+ SkuInfo.HiiDefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
+ for defaultstore in SkuInfo.DefaultStoreDict:
+ SkuInfo.DefaultStoreDict[defaultstore] = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
+ if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII]]:
+ if Pcd.DatumType == TAB_VOID:
+ if not Pcd.MaxDatumSize:
+ Pcd.MaxDatumSize = '0'
+ CurrentSize = int(Pcd.MaxDatumSize, 16) if Pcd.MaxDatumSize.upper().startswith("0X") else int(Pcd.MaxDatumSize)
+ OptionSize = len((StringToArray(Pcd.PcdValueFromComm)).split(","))
+ MaxSize = max(CurrentSize, OptionSize)
+ Pcd.MaxDatumSize = str(MaxSize)
+ else:
+ PcdInDec = self.DecPcds.get((Name, Guid))
+ if PcdInDec:
+ PcdInDec.PcdValueFromComm = NoFiledValues[(Guid, Name)][0]
+ if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE],
+ self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX]]:
+ self._Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
+ self._Pcds[Name, Guid].DefaultValue = NoFiledValues[( Guid, Name)][0]
+ if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX]]:
+ self._Pcds[Name, Guid].SkuInfoList = {TAB_DEFAULT:SkuInfoClass(TAB_DEFAULT, self.SkuIds[TAB_DEFAULT][0], '', '', '', '', '', NoFiledValues[( Guid, Name)][0])}
+ return AllPcds
+
+ def OverrideByFdfOverAll(self,AllPcds):
+
+ if GlobalData.gFdfParser is None:
+ return AllPcds
+ NoFiledValues = GlobalData.gFdfParser.Profile.PcdDict
+ for Name,Guid,Field in NoFiledValues:
+ if len(Field):
+ continue
+ Value = NoFiledValues[(Name,Guid,Field)]
+ if (Name,Guid) in AllPcds:
+ Pcd = AllPcds.get((Name,Guid))
+ if isinstance(self._DecPcds.get((Pcd.TokenCName,Pcd.TokenSpaceGuidCName), None),StructurePcd):
+ self._DecPcds.get((Pcd.TokenCName,Pcd.TokenSpaceGuidCName)).PcdValueFromComm = Value
+ else:
+ Pcd.PcdValueFromComm = Value
+ Pcd.DefaultValue = Value
+ for sku in Pcd.SkuInfoList:
+ SkuInfo = Pcd.SkuInfoList[sku]
+ if SkuInfo.DefaultValue:
+ SkuInfo.DefaultValue = Value
+ else:
+ SkuInfo.HiiDefaultValue = Value
+ for defaultstore in SkuInfo.DefaultStoreDict:
+ SkuInfo.DefaultStoreDict[defaultstore] = Value
+ if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII]]:
+ if Pcd.DatumType == TAB_VOID:
+ if not Pcd.MaxDatumSize:
+ Pcd.MaxDatumSize = '0'
+ CurrentSize = int(Pcd.MaxDatumSize,16) if Pcd.MaxDatumSize.upper().startswith("0X") else int(Pcd.MaxDatumSize)
+ OptionSize = len((StringToArray(Pcd.PcdValueFromComm)).split(","))
+ MaxSize = max(CurrentSize, OptionSize)
+ Pcd.MaxDatumSize = str(MaxSize)
+ else:
+ PcdInDec = self.DecPcds.get((Name,Guid))
+ if PcdInDec:
+ PcdInDec.PcdValueFromFdf = Value
+ if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE],
+ self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG]]:
+ self._Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
+ self._Pcds[Name, Guid].DefaultValue = Value
+ return AllPcds
+
+ def ParsePcdNameStruct(self,NamePart1,NamePart2):
+ TokenSpaceCName = PcdCName = DimensionAttr = Field = ""
+ if "." in NamePart1:
+ TokenSpaceCName, TempPcdCName = NamePart1.split(".")
+ if "[" in TempPcdCName:
+ PcdCName = TempPcdCName[:TempPcdCName.index("[")]
+ DimensionAttr = TempPcdCName[TempPcdCName.index("["):]
+ else:
+ PcdCName = TempPcdCName
+ Field = NamePart2
+ else:
+ TokenSpaceCName = NamePart1
+ if "[" in NamePart2:
+ PcdCName = NamePart2[:NamePart2.index("[")]
+ DimensionAttr = NamePart2[NamePart2.index("["):]
+ else:
+ PcdCName = NamePart2
+
+ return TokenSpaceCName,PcdCName,DimensionAttr,Field
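+
+ # Illustrative parses (names are hypothetical):
+ #   ParsePcdNameStruct("gGuid.MyPcd[0]", "Field1")
+ #       -> ("gGuid", "MyPcd", "[0]", "Field1")
+ #   ParsePcdNameStruct("gGuid", "MyPcd[1]")
+ #       -> ("gGuid", "MyPcd", "[1]", "")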
+
+ def UpdateStructuredPcds(self, TypeList, AllPcds):
+
+ DynamicPcdType = [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_DEFAULT],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_VPD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_DEFAULT],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_VPD]]
+
+ Pcds = AllPcds
+ DefaultStoreMgr = DefaultStore(self.DefaultStores)
+ SkuIds = self.SkuIds
+ self.SkuIdMgr.AvailableSkuIdSet.update({TAB_DEFAULT:0})
+ DefaultStores = {storename for pcdobj in AllPcds.values() for skuobj in pcdobj.SkuInfoList.values() for storename in skuobj.DefaultStoreDict}
+ DefaultStores.add(TAB_DEFAULT_STORES_DEFAULT)
+
+ S_PcdSet = []
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = []
+
+ for Type in TypeList:
+ RecordList.extend(self._RawData[Type, self._Arch])
+
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, default_store, Dummy4, Dummy5 in RecordList:
+ SkuName = SkuName.upper()
+ default_store = default_store.upper()
+ SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
+ if SkuName not in SkuIds:
+ continue
+ TCName,PCName,DimensionAttr,Field = self.ParsePcdNameStruct(TokenSpaceGuid, PcdCName)
+ pcd_in_dec = self._DecPcds.get((PCName,TCName), None)
+ if pcd_in_dec is None:
+ EdkLogger.error('build', PARSER_ERROR,
+ "Pcd (%s.%s) defined in DSC is not declared in DEC files. Arch: ['%s']" % (TCName, PCName, self._Arch),
+ File=self.MetaFile, Line = Dummy5)
+ if SkuName in SkuIds and ("." in TokenSpaceGuid or "[" in PcdCName):
+ if not isinstance (pcd_in_dec, StructurePcd):
+ EdkLogger.error('build', PARSER_ERROR,
+ "Pcd (%s.%s) is not declared as Structure PCD in DEC files. Arch: ['%s']" % (TCName, PCName, self._Arch),
+ File=self.MetaFile, Line = Dummy5)
+
+ S_PcdSet.append([ TCName,PCName,DimensionAttr,Field, SkuName, default_store, Dummy5, AnalyzePcdExpression(Setting)[0]])
+ ModuleScopeOverallValue = {}
+ for m in self.Modules.values():
+ mguid = m.Guid
+ if m.StrPcdSet:
+ S_PcdSet.extend(m.StrPcdSet)
+ mguid = m.StrPcdSet[0][4]
+ for (PCName,TCName) in m.StrPcdOverallValue:
+ Value, dsc_file, lineNo = m.StrPcdOverallValue[(PCName,TCName)]
+ ModuleScopeOverallValue.setdefault((PCName,TCName),{})[mguid] = Value, dsc_file, lineNo
+ # handle pcd value override
+ StrPcdSet = DscBuildData.GetStructurePcdInfo(S_PcdSet)
+ S_pcd_set = OrderedDict()
+ for str_pcd in StrPcdSet:
+ str_pcd_obj = Pcds.get((str_pcd[1], str_pcd[0]), None)
+ str_pcd_dec = self._DecPcds.get((str_pcd[1], str_pcd[0]), None)
+ str_pcd_obj_str = StructurePcd()
+ str_pcd_obj_str.copy(str_pcd_dec)
+ if str_pcd_obj:
+ str_pcd_obj_str.copy(str_pcd_obj)
+ if str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].HiiDefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
+ else:
+ str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].DefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
+ for str_pcd_data in StrPcdSet[str_pcd]:
+ if str_pcd_data[4] in SkuIds:
+ str_pcd_obj_str.AddOverrideValue(str_pcd_data[3], str(str_pcd_data[7]), TAB_DEFAULT if str_pcd_data[4] == TAB_COMMON else str_pcd_data[4], TAB_DEFAULT_STORES_DEFAULT if str_pcd_data[5] == TAB_COMMON else str_pcd_data[5], self.MetaFile.File if self.WorkspaceDir not in self.MetaFile.File else self.MetaFile.File[len(self.WorkspaceDir) if self.WorkspaceDir.endswith(os.path.sep) else len(self.WorkspaceDir)+1:], LineNo=str_pcd_data[6],DimensionAttr = str_pcd_data[2])
+ elif GlobalData.gGuidPattern.match(str_pcd_data[4]):
+ str_pcd_obj_str.AddComponentOverrideValue(str_pcd_data[3], str(str_pcd_data[7]), str_pcd_data[4].replace("-","S"), self.MetaFile.File if self.WorkspaceDir not in self.MetaFile.File else self.MetaFile.File[len(self.WorkspaceDir) if self.WorkspaceDir.endswith(os.path.sep) else len(self.WorkspaceDir)+1:], LineNo=str_pcd_data[6],DimensionAttr = str_pcd_data[2])
+ PcdComponentValue = ModuleScopeOverallValue.get((str_pcd_obj_str.TokenCName,str_pcd_obj_str.TokenSpaceGuidCName))
+ for module_guid in PcdComponentValue:
+ str_pcd_obj_str.PcdValueFromComponents[module_guid.replace("-","S")] = PcdComponentValue[module_guid]
+ S_pcd_set[str_pcd[1], str_pcd[0]] = str_pcd_obj_str
+
+ # Add structure PCDs that are only defined in the DEC and have no override in the DSC file
+ for Pcd in self.DecPcds:
+ if isinstance(self._DecPcds[Pcd], StructurePcd):
+ if Pcd not in S_pcd_set:
+ str_pcd_obj_str = StructurePcd()
+ str_pcd_obj_str.copy(self._DecPcds[Pcd])
+ str_pcd_obj = Pcds.get(Pcd, None)
+ if str_pcd_obj:
+ str_pcd_obj_str.copy(str_pcd_obj)
+ if str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].HiiDefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
+ else:
+ str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].DefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
+ S_pcd_set[Pcd] = str_pcd_obj_str
+ if S_pcd_set:
+ GlobalData.gStructurePcd[self.Arch] = S_pcd_set.copy()
+ self.FilterStrcturePcd(S_pcd_set)
+ for stru_pcd in S_pcd_set.values():
+ for skuid in SkuIds:
+ if skuid in stru_pcd.SkuOverrideValues:
+ continue
+ nextskuid = self.SkuIdMgr.GetNextSkuId(skuid)
+ NoDefault = False
+ if skuid not in stru_pcd.SkuOverrideValues:
+ while nextskuid not in stru_pcd.SkuOverrideValues:
+ if nextskuid == TAB_DEFAULT:
+ NoDefault = True
+ break
+ nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
+ stru_pcd.SkuOverrideValues[skuid] = copy.deepcopy(stru_pcd.SkuOverrideValues[nextskuid]) if not NoDefault else copy.deepcopy({defaultstorename: stru_pcd.DefaultValues for defaultstorename in DefaultStores} if DefaultStores else {}) #{TAB_DEFAULT_STORES_DEFAULT:stru_pcd.DefaultValues})
+ if not NoDefault:
+ stru_pcd.ValueChain.add((skuid, ''))
+ if 'DEFAULT' in stru_pcd.SkuOverrideValues and not GlobalData.gPcdSkuOverrides.get((stru_pcd.TokenCName, stru_pcd.TokenSpaceGuidCName)):
+ GlobalData.gPcdSkuOverrides.update(
+ {(stru_pcd.TokenCName, stru_pcd.TokenSpaceGuidCName): {'DEFAULT':stru_pcd.SkuOverrideValues['DEFAULT']}})
+ if stru_pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ for skuid in SkuIds:
+ nextskuid = skuid
+ NoDefault = False
+ if skuid not in stru_pcd.SkuOverrideValues:
+ while nextskuid not in stru_pcd.SkuOverrideValues:
+ if nextskuid == TAB_DEFAULT:
+ NoDefault = True
+ break
+ nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
+ if NoDefault:
+ continue
+ PcdDefaultStoreSet = set(defaultstorename for defaultstorename in stru_pcd.SkuOverrideValues[nextskuid])
+ mindefaultstorename = DefaultStoreMgr.GetMin(PcdDefaultStoreSet)
+
+ for defaultstoreid in DefaultStores:
+ if defaultstoreid not in stru_pcd.SkuOverrideValues[skuid]:
+ stru_pcd.SkuOverrideValues[skuid][defaultstoreid] = CopyDict(stru_pcd.SkuOverrideValues[nextskuid][mindefaultstorename])
+ stru_pcd.ValueChain.add((skuid, defaultstoreid))
+ S_pcd_set = DscBuildData.OverrideByFdf(S_pcd_set,self.WorkspaceDir)
+ S_pcd_set = DscBuildData.OverrideByComm(S_pcd_set)
+
+ # Create a tool to calculate the structure PCD values
+ Str_Pcd_Values = self.GenerateByteArrayValue(S_pcd_set)
+
+ if Str_Pcd_Values:
+ for (skuname, StoreName, PcdGuid, PcdName, PcdValue) in Str_Pcd_Values:
+ str_pcd_obj = S_pcd_set.get((PcdName, PcdGuid))
+ if str_pcd_obj is None:
+ EdkLogger.error('build', AUTOGEN_ERROR, "Structure PCD %s.%s is not found in the structure PCD set." % (PcdGuid, PcdName))
+ if str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ if skuname not in str_pcd_obj.SkuInfoList:
+ str_pcd_obj.SkuInfoList[skuname] = SkuInfoClass(SkuIdName=skuname, SkuId=self.SkuIds[skuname][0], HiiDefaultValue=PcdValue, DefaultStore = {StoreName:PcdValue})
+ else:
+ str_pcd_obj.SkuInfoList[skuname].HiiDefaultValue = PcdValue
+ str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.update({StoreName:PcdValue})
+ elif str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
+ if skuname in (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT, TAB_COMMON):
+ str_pcd_obj.DefaultValue = PcdValue
+ else:
+ # Module-scope structure PCD
+ moduleguid = skuname.replace("S","-")
+ if GlobalData.gGuidPattern.match(moduleguid):
+ for component in self.Modules.values():
+ if component.Guid == moduleguid:
+ component.Pcds[(PcdName, PcdGuid)].DefaultValue = PcdValue
+
+ else:
+ if skuname not in str_pcd_obj.SkuInfoList:
+ nextskuid = self.SkuIdMgr.GetNextSkuId(skuname)
+ NoDefault = False
+ while nextskuid not in str_pcd_obj.SkuInfoList:
+ if nextskuid == TAB_DEFAULT:
+ NoDefault = True
+ break
+ nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
+ str_pcd_obj.SkuInfoList[skuname] = copy.deepcopy(str_pcd_obj.SkuInfoList[nextskuid]) if not NoDefault else SkuInfoClass(SkuIdName=skuname, SkuId=self.SkuIds[skuname][0], DefaultValue=PcdValue)
+ str_pcd_obj.SkuInfoList[skuname].SkuId = self.SkuIds[skuname][0]
+ str_pcd_obj.SkuInfoList[skuname].SkuIdName = skuname
+ else:
+ str_pcd_obj.SkuInfoList[skuname].DefaultValue = PcdValue
+ for str_pcd_obj in S_pcd_set.values():
+ if str_pcd_obj.Type not in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ continue
+ PcdDefaultStoreSet = set(defaultstorename for skuobj in str_pcd_obj.SkuInfoList.values() for defaultstorename in skuobj.DefaultStoreDict)
+ DefaultStoreObj = DefaultStore(self._GetDefaultStores())
+ mindefaultstorename = DefaultStoreObj.GetMin(PcdDefaultStoreSet)
+ str_pcd_obj.SkuInfoList[self.SkuIdMgr.SystemSkuId].HiiDefaultValue = str_pcd_obj.SkuInfoList[self.SkuIdMgr.SystemSkuId].DefaultStoreDict[mindefaultstorename]
+
+ for str_pcd_obj in S_pcd_set.values():
+
+ str_pcd_obj.MaxDatumSize = DscBuildData.GetStructurePcdMaxSize(str_pcd_obj)
+ Pcds[str_pcd_obj.TokenCName, str_pcd_obj.TokenSpaceGuidCName] = str_pcd_obj
+ Pcds[str_pcd_obj.TokenCName, str_pcd_obj.TokenSpaceGuidCName].CustomAttribute['IsStru']=True
+
+ for pcdkey in Pcds:
+ pcd = Pcds[pcdkey]
+ if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
+ del pcd.SkuInfoList[TAB_COMMON]
+ elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ del pcd.SkuInfoList[TAB_COMMON]
+
+ list(map(self.FilterSkuSettings, [Pcds[pcdkey] for pcdkey in Pcds if Pcds[pcdkey].Type in DynamicPcdType]))
+ return Pcds
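+
+    # PlatformUsedPcds: union of the PCD names referenced by DSC modules,
+    # FDF INF modules and resolved library instances; used below to prune
+    # structure PCDs that nothing consumes.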
+ @cached_property
+ def PlatformUsedPcds(self):
+ FdfInfList = []
+ if GlobalData.gFdfParser:
+ FdfInfList = GlobalData.gFdfParser.Profile.InfList
+ FdfModuleList = [PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch) for Inf in FdfInfList]
+ AllModulePcds = set()
+ ModuleSet = set(list(self._Modules.keys()) + FdfModuleList)
+ for ModuleFile in ModuleSet:
+ ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
+ AllModulePcds = AllModulePcds | ModuleData.PcdsName
+ for ModuleFile in self.LibraryInstances:
+ ModuleData = self._Bdb.CreateBuildObject(ModuleFile, self._Arch, self._Target, self._Toolchain)
+ AllModulePcds = AllModulePcds | ModuleData.PcdsName
+ return AllModulePcds
+
+    # Filter out the structure PCDs that are not used by any module in the DSC or FDF files.
+ def FilterStrcturePcd(self, S_pcd_set):
+ UnusedStruPcds = set(S_pcd_set.keys()) - self.PlatformUsedPcds
+ for (Token, TokenSpaceGuid) in UnusedStruPcds:
+ del S_pcd_set[(Token, TokenSpaceGuid)]
+
+ ## Retrieve non-dynamic PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object contains settings of given PCD type
+ #
+ def _GetPcd(self, Type):
+ Pcds = OrderedDict()
+ #
+ # tdict is a special dict kind of type, used for selecting correct
+ # PCD settings for certain ARCH
+ #
+ AvailableSkuIdSet = copy.copy(self.SkuIds)
+
+ PcdDict = tdict(True, 4)
+ PcdList = []
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = self._RawData[Type, self._Arch]
+ PcdValueDict = OrderedDict()
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
+ SkuName = SkuName.upper()
+ SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
+ if SkuName not in AvailableSkuIdSet:
+ EdkLogger.error('build ', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
+ File=self.MetaFile, Line=Dummy5)
+ if SkuName in (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT, TAB_COMMON):
+ if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, Dummy5) not in PcdList:
+ PcdList.append((PcdCName, TokenSpaceGuid, SkuName, Dummy5))
+ PcdDict[Arch, PcdCName, TokenSpaceGuid, SkuName] = Setting
+
+ for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
+ Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid, SkuName]
+ if Setting is None:
+ continue
+ PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
+ if MaxDatumSize:
+ if int(MaxDatumSize, 0) > 0xFFFF:
+ EdkLogger.error('build', FORMAT_INVALID, "The size value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)),
+ File=self.MetaFile, Line=Dummy4)
+ if int(MaxDatumSize, 0) < 0:
+ EdkLogger.error('build', FORMAT_INVALID, "The size value can't be set to negative value for %s." % ".".join((TokenSpaceGuid, PcdCName)),
+ File=self.MetaFile, Line=Dummy4)
+ if (PcdCName, TokenSpaceGuid) in PcdValueDict:
+ PcdValueDict[PcdCName, TokenSpaceGuid][SkuName] = (PcdValue, DatumType, MaxDatumSize,Dummy4)
+ else:
+ PcdValueDict[PcdCName, TokenSpaceGuid] = {SkuName:(PcdValue, DatumType, MaxDatumSize,Dummy4)}
+
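+        # Choose the effective setting per PCD: the active (system) SKU wins,
+        # then TAB_DEFAULT, then TAB_COMMON; a PCD with none of these keeps
+        # no DSC-level value.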
+ for ((PcdCName, TokenSpaceGuid), PcdSetting) in PcdValueDict.items():
+ if self.SkuIdMgr.SystemSkuId in PcdSetting:
+ PcdValue, DatumType, MaxDatumSize,_ = PcdSetting[self.SkuIdMgr.SystemSkuId]
+ elif TAB_DEFAULT in PcdSetting:
+ PcdValue, DatumType, MaxDatumSize,_ = PcdSetting[TAB_DEFAULT]
+ elif TAB_COMMON in PcdSetting:
+ PcdValue, DatumType, MaxDatumSize,_ = PcdSetting[TAB_COMMON]
+ else:
+ PcdValue = None
+ DatumType = None
+ MaxDatumSize = None
+
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ DatumType,
+ PcdValue,
+ '',
+ MaxDatumSize,
+ {},
+ False,
+ None,
+ IsDsc=True)
+ for SkuName in PcdValueDict[PcdCName, TokenSpaceGuid]:
+ Settings = PcdValueDict[PcdCName, TokenSpaceGuid][SkuName]
+ if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][TAB_DEFAULT_STORES_DEFAULT] = Settings[0]
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][TAB_DEFAULT_STORES_DEFAULT] = (self.MetaFile.File,Settings[3])
+ return Pcds
+
+ @staticmethod
+ def GetStructurePcdMaxSize(str_pcd):
+ pcd_default_value = str_pcd.DefaultValue
+ sku_values = [skuobj.HiiDefaultValue if str_pcd.Type in [DscBuildData._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], DscBuildData._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]] else skuobj.DefaultValue for skuobj in str_pcd.SkuInfoList.values()]
+ sku_values.append(pcd_default_value)
+
+ def get_length(value):
+ Value = value.strip()
+            if len(Value) > 1:
+ if Value.startswith(TAB_GUID) and Value.endswith(')'):
+ return 16
+ if Value.startswith('L"') and Value.endswith('"'):
+ return len(Value[2:-1])
+ if Value[0] == '"' and Value[-1] == '"':
+ return len(Value) - 2
+ if Value.strip().startswith("{CODE("):
+ tmpValue = RemoveCComments(Value)
+ return len(tmpValue.split(","))
+ if (Value[0] == '{' and Value[-1] == '}'):
+ return len(Value.split(","))
+ if Value.startswith("L'") and Value.endswith("'") and len(list(Value[2:-1])) > 1:
+ return len(list(Value[2:-1]))
+ if Value[0] == "'" and Value[-1] == "'" and len(list(Value[1:-1])) > 1:
+ return len(Value) - 2
+ return len(Value)
+
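+        # Illustrative sizing via get_length above (assumed literal forms):
+        #   L"ABC"      -> 3 (CHAR16 count)
+        #   "AB"        -> 2
+        #   {0x1, 0x2}  -> 2 (comma-separated items)
+        #   GUID(...)   -> 16
+        # The maximum over the DEC default and all SKU values is returned as
+        # a decimal string.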
+ return str(max(get_length(item) for item in sku_values))
+
+ @staticmethod
+ def ExecuteCommand (Command):
+ try:
+ Process = subprocess.Popen(Command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+        except Exception:
+            EdkLogger.error('Build', COMMAND_FAILURE, 'Cannot execute command: %s' % Command)
+ Result = Process.communicate()
+ return Process.returncode, Result[0].decode(errors='ignore'), Result[1].decode(errors='ignore')
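+
+    # Example use (illustrative): on a POSIX shell,
+    #   rc, out, err = DscBuildData.ExecuteCommand('echo hello')
+    # yields rc == 0 and out == 'hello\n'.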
+
+ @staticmethod
+ def IntToCString(Value, ValueSize):
+ Result = '"'
+ if not isinstance (Value, str):
+ for Index in range(0, ValueSize):
+ Result = Result + '\\x%02x' % (Value & 0xff)
+ Value = Value >> 8
+ Result = Result + '"'
+ return Result
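+
+    # Example (illustrative): IntToCString(0x3412, 2) emits the C string
+    # literal "\x12\x34" (least-significant byte first); a str input passes
+    # through unchanged and yields the empty literal "".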
+
+ def GenerateSizeFunction(self, Pcd):
+ CApp = "// Default Value in Dec \n"
+ CApp = CApp + "void Cal_%s_%s_Size(UINT32 *Size){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+
+ if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
+ CApp += " *Size = (sizeof (%s) > *Size ? sizeof (%s) : *Size);\n" % (Pcd.DatumType,Pcd.DatumType)
+ else:
+ if "{CODE(" in Pcd.DefaultValueFromDec:
+ CApp += " *Size = (sizeof (%s_%s_INIT_Value) > *Size ? sizeof (%s_%s_INIT_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,Pcd.TokenSpaceGuidCName,Pcd.TokenCName)
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET | PCD_DYNAMIC_EX_TYPE_SET:
+ for skuname in Pcd.SkuInfoList:
+ skuobj = Pcd.SkuInfoList[skuname]
+ if skuobj.VariableName:
+ for defaultstore in skuobj.DefaultStoreDict:
+ pcddef = self.GetPcdDscRawDefaultValue(Pcd,skuname,defaultstore)
+ if pcddef:
+ if "{CODE(" in pcddef:
+ CApp += " *Size = (sizeof (%s_%s_%s_%s_Value) > *Size ? sizeof (%s_%s_%s_%s_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,defaultstore,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,defaultstore)
+ else:
+ CApp += " *Size = %s > *Size ? %s : *Size;\n" % (self.GetStructurePcdMaxSize(Pcd),self.GetStructurePcdMaxSize(Pcd))
+ else:
+ pcddef = self.GetPcdDscRawDefaultValue(Pcd,skuname,TAB_DEFAULT_STORES_DEFAULT)
+ if pcddef:
+ if "{CODE(" in pcddef:
+ CApp += " *Size = (sizeof (%s_%s_%s_%s_Value) > *Size ? sizeof (%s_%s_%s_%s_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,TAB_DEFAULT_STORES_DEFAULT,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,TAB_DEFAULT_STORES_DEFAULT)
+ else:
+ CApp += " *Size = %s > *Size ? %s : *Size;\n" % (self.GetStructurePcdMaxSize(Pcd),self.GetStructurePcdMaxSize(Pcd))
+ else:
+ pcddef = self.GetPcdDscRawDefaultValue(Pcd,TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT)
+ if pcddef:
+ if "{CODE(" in pcddef:
+ CApp += " *Size = (sizeof (%s_%s_%s_%s_Value) > *Size ? sizeof (%s_%s_%s_%s_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT)
+ else:
+ CApp += " *Size = %s > *Size ? %s : *Size;\n" % (self.GetStructurePcdMaxSize(Pcd),self.GetStructurePcdMaxSize(Pcd))
+ ActualCap = []
+ for index in Pcd.DefaultValues:
+ if index:
+ ActualCap.append(index)
+ FieldList = Pcd.DefaultValues[index]
+ if not FieldList:
+ continue
+ for FieldName in FieldList:
+ FieldName = "." + FieldName
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName.strip(".")][0])
+ if IsArray and not (FieldList[FieldName.strip(".")][0].startswith('{GUID') and FieldList[FieldName.strip(".")][0].endswith('}')):
+ try:
+ Value = ValueExpressionEx(FieldList[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2]))
+ Value, ValueSize = ParseFieldValue(Value)
+ if not Pcd.IsArray():
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2], FieldList[FieldName.strip(".")][0]);
+ else:
+ NewFieldName = ''
+ FieldName_ori = FieldName.strip('.')
+ while '[' in FieldName:
+ NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
+ Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
+ FieldName = FieldName.split(']', 1)[1]
+ FieldName = NewFieldName + FieldName
+ while '[' in FieldName and not Pcd.IsArray():
+ FieldName = FieldName.rsplit('[', 1)[0]
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, FieldList[FieldName_ori][1], FieldList[FieldName_ori][2], FieldList[FieldName_ori][0])
+ for skuname in Pcd.SkuOverrideValues:
+ if skuname == TAB_COMMON:
+ continue
+ for defaultstorenameitem in Pcd.SkuOverrideValues[skuname]:
+ CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (skuname, defaultstorenameitem)
+ for index in Pcd.SkuOverrideValues[skuname][defaultstorenameitem]:
+ if index:
+ ActualCap.append(index)
+ for FieldList in [Pcd.SkuOverrideValues[skuname][defaultstorenameitem][index]]:
+ if not FieldList:
+ continue
+ for FieldName in FieldList:
+ FieldName = "." + FieldName
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName.strip(".")][0])
+ if IsArray and not (FieldList[FieldName.strip(".")][0].startswith('{GUID') and FieldList[FieldName.strip(".")][0].endswith('}')):
+ try:
+ Value = ValueExpressionEx(FieldList[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2]))
+ Value, ValueSize = ParseFieldValue(Value)
+ if not Pcd.IsArray():
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2], FieldList[FieldName.strip(".")][0]);
+ else:
+ NewFieldName = ''
+ FieldName_ori = FieldName.strip('.')
+ while '[' in FieldName:
+ NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
+ Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
+ FieldName = FieldName.split(']', 1)[1]
+ FieldName = NewFieldName + FieldName
+ while '[' in FieldName and not Pcd.IsArray():
+ FieldName = FieldName.rsplit('[', 1)[0]
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, FieldList[FieldName_ori][1], FieldList[FieldName_ori][2], FieldList[FieldName_ori][0])
+ if Pcd.PcdFieldValueFromFdf:
+ CApp = CApp + "// From fdf \n"
+ for FieldName in Pcd.PcdFieldValueFromFdf:
+ FieldName = "." + FieldName
+ IsArray = _IsFieldValueAnArray(Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0])
+ if IsArray and not (Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0].startswith('{GUID') and Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0].endswith('}')):
+ try:
+ Value = ValueExpressionEx(Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][1], Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][2]))
+ Value, ValueSize = ParseFieldValue(Value)
+ if not Pcd.IsArray():
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][1], Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][2], Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0]);
+ else:
+ NewFieldName = ''
+ FieldName_ori = FieldName.strip('.')
+ while '[' in FieldName:
+ NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
+ Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
+ FieldName = FieldName.split(']', 1)[1]
+ FieldName = NewFieldName + FieldName
+ while '[' in FieldName:
+ FieldName = FieldName.rsplit('[', 1)[0]
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %s Value %s \n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, Pcd.PcdFieldValueFromFdf[FieldName_ori][1], Pcd.PcdFieldValueFromFdf[FieldName_ori][2], Pcd.PcdFieldValueFromFdf[FieldName_ori][0])
+ if Pcd.PcdFieldValueFromComm:
+ CApp = CApp + "// From Command Line \n"
+ for FieldName in Pcd.PcdFieldValueFromComm:
+ FieldName = "." + FieldName
+ IsArray = _IsFieldValueAnArray(Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0])
+ if IsArray and not (Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0].startswith('{GUID') and Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0].endswith('}')):
+ try:
+ Value = ValueExpressionEx(Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), Pcd.PcdFieldValueFromComm[FieldName.strip(".")][1], Pcd.PcdFieldValueFromComm[FieldName.strip(".")][2]))
+ Value, ValueSize = ParseFieldValue(Value)
+ if not Pcd.IsArray():
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), Pcd.PcdFieldValueFromComm[FieldName.strip(".")][1], Pcd.PcdFieldValueFromComm[FieldName.strip(".")][2], Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0]);
+ else:
+ NewFieldName = ''
+ FieldName_ori = FieldName.strip('.')
+ while '[' in FieldName:
+ NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
+ Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
+ FieldName = FieldName.split(']', 1)[1]
+ FieldName = NewFieldName + FieldName
+ while '[' in FieldName and not Pcd.IsArray():
+ FieldName = FieldName.rsplit('[', 1)[0]
+ CApp = CApp + ' __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, Pcd.PcdFieldValueFromComm[FieldName_ori][1], Pcd.PcdFieldValueFromComm[FieldName_ori][2], Pcd.PcdFieldValueFromComm[FieldName_ori][0])
+ if Pcd.GetPcdMaxSize():
+ CApp = CApp + " *Size = (%d > *Size ? %d : *Size); // The Pcd maxsize is %d \n" % (Pcd.GetPcdMaxSize(), Pcd.GetPcdMaxSize(), Pcd.GetPcdMaxSize())
+ ArraySizeByAssign = self.CalculateActualCap(ActualCap)
+ if ArraySizeByAssign > 1:
+ CApp = CApp + " *Size = (%d > *Size ? %d : *Size); \n" % (ArraySizeByAssign, ArraySizeByAssign)
+ CApp = CApp + "}\n"
+ return CApp
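+
+    # Illustrative output (assumed PCD gTokenSpaceGuid.PcdMyStruct whose
+    # flexible-array field Buf is assigned up to Buf[5]):
+    #   void Cal_gTokenSpaceGuid_PcdMyStruct_Size(UINT32 *Size){
+    #     __FLEXIBLE_SIZE(*Size, MY_STRUCT, Buf, 6); // highest index + 1
+    #   }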
+ def CalculateActualCap(self,ActualCap):
+ if not ActualCap:
+ return 1
+ maxsize = 1
+ for item in ActualCap:
+ index_elements = ArrayIndex.findall(item)
+ rt = 1
+ for index_e in index_elements:
+ index_num = index_e.lstrip("[").rstrip("]").strip()
+ if not index_num:
+                    # Flexible PCD array assignment is not supported
+ return 1
+ index_num = int(index_num,16) if index_num.startswith(("0x","0X")) else int(index_num)
+ rt = rt * (index_num+1)
+ if rt >maxsize:
+ maxsize = rt
+
+ return maxsize
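+
+    # Example (illustrative): an assignment indexed "[1][2]" caps the array
+    # at (1 + 1) * (2 + 1) = 6 elements; an empty index such as "[]" returns 1.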
+
+ @staticmethod
+ def GenerateSizeStatments(Pcd,skuname,defaultstorename):
+ if Pcd.IsArray():
+ r_datatype = [Pcd.BaseDatumType]
+ lastoneisEmpty = False
+ for dem in Pcd.Capacity:
+ if lastoneisEmpty:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))))
+ if dem == '0' or dem == "-1":
+ r_datatype.append("[1]")
+ lastoneisEmpty = True
+ else:
+ r_datatype.append("[" + dem + "]")
+
+ if Pcd.Type in [MODEL_PCD_DYNAMIC_EX_HII, MODEL_PCD_DYNAMIC_HII]:
+ PcdDefValue = Pcd.SkuInfoList.get(skuname).DefaultStoreDict.get(defaultstorename)
+ elif Pcd.Type in [MODEL_PCD_DYNAMIC_EX_DEFAULT,MODEL_PCD_DYNAMIC_VPD,MODEL_PCD_DYNAMIC_DEFAULT,MODEL_PCD_DYNAMIC_EX_VPD]:
+ PcdDefValue = Pcd.SkuInfoList.get(skuname).DefaultValue
+ else:
+ PcdDefValue = Pcd.DefaultValue
+ if lastoneisEmpty:
+ if "{CODE(" not in PcdDefValue:
+ sizebasevalue_plus = "(%s / sizeof(%s) + 1)" % ((DscBuildData.GetStructurePcdMaxSize(Pcd), Pcd.BaseDatumType))
+ sizebasevalue = "(%s / sizeof(%s))" % ((DscBuildData.GetStructurePcdMaxSize(Pcd), Pcd.BaseDatumType))
+ sizeof = "sizeof(%s)" % Pcd.BaseDatumType
+ CApp = ' int ArraySize = %s %% %s ? %s : %s ;\n' % ( (DscBuildData.GetStructurePcdMaxSize(Pcd), sizeof, sizebasevalue_plus, sizebasevalue))
+ CApp += ' Size = ArraySize * sizeof(%s); \n' % Pcd.BaseDatumType
+ else:
+ CApp = " Size = 0;\n"
+ else:
+ CApp = ' Size = sizeof(%s);\n' % ("".join(r_datatype) )
+ else:
+ CApp = ' Size = sizeof(%s);\n' % (Pcd.DatumType)
+ CApp = CApp + ' Cal_%s_%s_Size(&Size);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ return CApp
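+
+    # Illustrative output for a plain (non-array) structure PCD:
+    #   Size = sizeof(MY_STRUCT);
+    #   Cal_gTokenSpaceGuid_PcdMyStruct_Size(&Size);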
+
+ def GetIndicator(self,index,FieldName,Pcd):
+ def cleanupindex(indexstr):
+ return indexstr.strip("[").strip("]").strip()
+ index_elements = ArrayIndex.findall(index)
+ pcd_capacity = Pcd.Capacity
+ if index:
+ indicator = "(Pcd"
+ if len(pcd_capacity)>2:
+ for i in range(0,len(index_elements)):
+ index_ele = index_elements[i]
+ index_num = index_ele.strip("[").strip("]").strip()
+ if i == len(index_elements) -2:
+ indicator += "+ %d*Size/sizeof(%s)/%d + %s)" %(int(cleanupindex(index_elements[i+1])),Pcd.BaseDatumType,reduce(lambda x,y: int(x)*int(y),pcd_capacity[:-1]), cleanupindex(index_elements[i]))
+ break
+ else:
+ indicator += " + %d*%s*Size/sizeof(%s)/%d" %(int(cleanupindex(index_elements[i])),reduce(lambda x,y: int(x)*int(y),pcd_capacity[i+1:-1]),Pcd.BaseDatumType,reduce(lambda x,y: int(x)*int(y),pcd_capacity[:-1]))
+ elif len(pcd_capacity) == 2:
+ indicator += "+ %d*Size/sizeof(%s)/%d + %s)" %(int(cleanupindex(index_elements[0])),Pcd.BaseDatumType,int(pcd_capacity[0]), index_elements[1].strip("[").strip("]").strip())
+ elif len(pcd_capacity) == 1:
+ index_ele = index_elements[0]
+ index_num = index_ele.strip("[").strip("]").strip()
+ indicator += " + %s)" % (index_num)
+ else:
+ indicator = "Pcd"
+ if FieldName:
+ indicator += "->" + FieldName
+ return indicator
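+
+    # Example (illustrative): for a one-dimensional array PCD indexed "[3]"
+    # with field "Data", the indicator is "(Pcd + 3)->Data"; with no index it
+    # is simply "Pcd->Data".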
+
+ def GetStarNum(self,Pcd):
+ if not Pcd.IsArray():
+ return 1
+ elif Pcd.IsSimpleTypeArray():
+ return len(Pcd.Capacity)
+ else:
+ return len(Pcd.Capacity) + 1
+ def GenerateDefaultValueAssignFunction(self, Pcd):
+ CApp = "// Default value in Dec \n"
+ CApp = CApp + "void Assign_%s_%s_Default_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.BaseDatumType)
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+ CApp = CApp + ' UINT32 PcdArraySize;\n'
+ DefaultValueFromDec = Pcd.DefaultValueFromDec
+ IsArray = _IsFieldValueAnArray(Pcd.DefaultValueFromDec)
+ if IsArray:
+ try:
+ DefaultValueFromDec = ValueExpressionEx(Pcd.DefaultValueFromDec, TAB_VOID)(True)
+ except BadExpression:
+ EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from DEC: %s" %
+ (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, DefaultValueFromDec))
+ DefaultValueFromDec = StringToArray(DefaultValueFromDec)
+ Value, ValueSize = ParseFieldValue (DefaultValueFromDec)
+ if IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ if Pcd.IsArray():
+ pcdarraysize = Pcd.PcdArraySize()
+ if "{CODE(" in Pcd.DefaultValueFromDec:
+ if Pcd.Capacity[-1] != "-1":
+ CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_INIT_Value) < %d * sizeof(%s), "Pcd %s.%s Value in Dec exceed the array capability %s"); // From %s Line %s \n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,Pcd.DefaultValueFromDecInfo[0],Pcd.DefaultValueFromDecInfo[1])
+ CApp = CApp + ' PcdArraySize = sizeof(%s_%s_INIT_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ CApp = CApp + ' memcpy (Pcd, %s_%s_INIT_Value,PcdArraySize);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ else:
+ if Pcd.Capacity[-1] != "-1":
+ CApp = CApp + '__STATIC_ASSERT(%d < %d * sizeof(%s), "Pcd %s.%s Value in Dec exceed the array capability %s"); // From %s Line %s \n' % (ValueSize,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,Pcd.DefaultValueFromDecInfo[0],Pcd.DefaultValueFromDecInfo[1])
+ CApp = CApp + ' PcdArraySize = %d;\n' % ValueSize
+ CApp = CApp + ' Value = %s; // From DEC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultValueFromDec)
+ CApp = CApp + ' memcpy (Pcd, Value, PcdArraySize);\n'
+ else:
+ if "{CODE(" in Pcd.DefaultValueFromDec:
+ CApp = CApp + ' PcdArraySize = sizeof(%s_%s_INIT_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ CApp = CApp + ' memcpy (Pcd, &%s_%s_INIT_Value,PcdArraySize);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ else:
+ CApp = CApp + ' Value = %s; // From DEC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultValueFromDec)
+ CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
+ elif isinstance(Value, str):
+ CApp = CApp + ' Pcd = %s; // From DEC Default Value %s\n' % (Value, Pcd.DefaultValueFromDec)
+ for index in Pcd.DefaultValues:
+ FieldList = Pcd.DefaultValues[index]
+ if not FieldList:
+ continue
+ for FieldName in FieldList:
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
+ if IsArray:
+ try:
+ FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+
+ try:
+ Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
+ except Exception:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+
+ indicator = self.GetIndicator(index, FieldName,Pcd)
+ if IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
+ CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' memcpy (&%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (indicator, ValueSize, ValueSize)
+ elif isinstance(Value, str):
+ CApp = CApp + ' %s = %s; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ else:
+ if '[' in FieldName and ']' in FieldName:
+ Index = int(FieldName.split('[')[1].split(']')[0])
+ CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
+ if ValueSize > 4:
+ CApp = CApp + ' %s = %dULL; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ else:
+ CApp = CApp + ' %s = %d; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + "}\n"
+ return CApp
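+
+    # Illustrative output (assumed scalar field Version in the DEC default):
+    #   void Assign_gTokenSpaceGuid_PcdMyStruct_Default_Value(MY_STRUCT *Pcd){
+    #     ...
+    #     Pcd->Version = 1; // From MyPkg.dec Line 42 Value 1
+    #   }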
+
+ @staticmethod
+ def GenerateDefaultValueAssignStatement(Pcd):
+ CApp = ' Assign_%s_%s_Default_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ return CApp
+
+ def GetPcdDscRawDefaultValue(self,Pcd, SkuName,DefaultStoreName):
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET or Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT):
+ pcddefaultvalue = Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT) if Pcd.DefaultFromDSC else None
+ else:
+ pcddefaultvalue = Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName)
+ else:
+ pcddefaultvalue = Pcd.DscRawValue.get(SkuName, {}).get(TAB_DEFAULT_STORES_DEFAULT)
+
+ return pcddefaultvalue
+ def GetPcdDscRawValueInfo(self,Pcd, SkuName,DefaultStoreName):
+ DscValueInfo = Pcd.DscRawValueInfo.get(SkuName, {}).get(DefaultStoreName)
+ if DscValueInfo:
+ dscfilepath,lineno = DscValueInfo
+ else:
+ dscfilepath = self.MetaFile.File
+ lineno = ""
+ return dscfilepath,lineno
+
+ def GenerateInitValueFunction(self, Pcd, SkuName, DefaultStoreName):
+ CApp = "// Value in Dsc for Sku: %s, DefaultStore %s\n" % (SkuName, DefaultStoreName)
+ CApp = CApp + "void Assign_%s_%s_%s_%s_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, SkuName, DefaultStoreName, Pcd.BaseDatumType)
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+ CApp = CApp + ' UINT32 PcdArraySize;\n'
+
+ CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT)
+ inherit_OverrideValues = Pcd.SkuOverrideValues[SkuName]
+ dscfilepath,lineno = self.GetPcdDscRawValueInfo(Pcd, SkuName, DefaultStoreName)
+ if lineno:
+ valuefrom = "%s Line %s" % (dscfilepath,str(lineno))
+ else:
+ valuefrom = dscfilepath
+
+ pcddefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, SkuName, DefaultStoreName)
+ if pcddefaultvalue:
+ FieldList = pcddefaultvalue
+ IsArray = _IsFieldValueAnArray(FieldList)
+ if IsArray:
+ if "{CODE(" not in FieldList:
+ try:
+ FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
+ except BadExpression:
+ EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from DSC: %s" %
+ (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
+ Value, ValueSize = ParseFieldValue (FieldList)
+
+ if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT):
+ if isinstance(Value, str):
+ if "{CODE(" in Value:
+ if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
+ pcdarraysize = Pcd.PcdArraySize()
+ CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) < %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType, valuefrom)
+ CApp = CApp+ ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value,PcdArraySize);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ else:
+ CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ if Pcd.IsArray():
+ pcdarraysize = Pcd.PcdArraySize()
+ if "{CODE(" in pcddefaultvalue:
+ if Pcd.Capacity[-1] != "-1":
+ CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) < %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
+ CApp = CApp + ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ CApp = CApp + ' memcpy (Pcd, %s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ else:
+ if Pcd.Capacity[-1] != "-1":
+ CApp = CApp + '__STATIC_ASSERT(%d < %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (ValueSize,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
+ CApp = CApp + ' PcdArraySize = %d;\n' % ValueSize
+ CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
+ CApp = CApp + ' memcpy (Pcd, Value, PcdArraySize);\n'
+ else:
+ if "{CODE(" in pcddefaultvalue:
+ CApp = CApp + ' PcdArraySize = %d < sizeof(%s) * %d ? %d: sizeof(%s) * %d;\n ' % (ValueSize,Pcd.BaseDatumType,pcdarraysize,ValueSize,Pcd.BaseDatumType,pcdarraysize)
+ CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ else:
+ CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
+ CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
+ else:
+ if isinstance(Value, str):
+ if "{CODE(" in Value:
+ if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
+ pcdarraysize = Pcd.PcdArraySize()
+ CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) < %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
+ CApp = CApp + ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n '% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ else:
+ CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName))
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ if Pcd.IsArray():
+ pcdarraysize = Pcd.PcdArraySize()
+ if "{CODE(" in pcddefaultvalue:
+ if Pcd.Capacity[-1] != "-1":
+ CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) < %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
+                            CApp = CApp + ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ CApp = CApp + ' memcpy (Pcd, %s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ else:
+ if Pcd.Capacity[-1] != "-1":
+ CApp = CApp + '__STATIC_ASSERT(%d < %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (ValueSize,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
+ CApp = CApp + ' PcdArraySize = %d;\n' % ValueSize
+ CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DscRawValue.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
+ CApp = CApp + ' memcpy (Pcd, Value, PcdArraySize);\n'
+ else:
+ if "{CODE(" in pcddefaultvalue:
+ CApp = CApp + ' PcdArraySize = %d < sizeof(%s) * %d ? %d: sizeof(%s) * %d;\n ' % (ValueSize,Pcd.BaseDatumType,pcdarraysize,ValueSize,Pcd.BaseDatumType,pcdarraysize)
+ CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
+ else:
+ CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName))
+ CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
+
+ inheritvalue = inherit_OverrideValues.get(DefaultStoreName)
+ if not inheritvalue:
+ inheritvalue = []
+ for index in inheritvalue:
+ FieldList = inheritvalue[index]
+ if not FieldList:
+ continue
+ if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT) or (( (SkuName, '') not in Pcd.ValueChain) and ( (SkuName, DefaultStoreName) not in Pcd.ValueChain )):
+ for FieldName in FieldList:
+ indicator = self.GetIndicator(index, FieldName,Pcd)
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
+ if IsArray:
+ try:
+ FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+ try:
+ Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
+ except Exception:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
+ CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' memcpy (&%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (indicator, ValueSize, ValueSize)
+ else:
+ if '[' in FieldName and ']' in FieldName:
+ Index = int(FieldName.split('[')[1].split(']')[0])
+ CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
+ if ValueSize > 4:
+ CApp = CApp + ' %s = %dULL; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ else:
+ CApp = CApp + ' %s = %d; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + "}\n"
+ return CApp
+
+ @staticmethod
+ def GenerateInitValueStatement(Pcd, SkuName, DefaultStoreName):
+ CApp = ' Assign_%s_%s_%s_%s_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, SkuName, DefaultStoreName)
+ return CApp
+
+ def GenerateCommandLineValue(self, Pcd):
+ CApp = "// Value in CommandLine\n"
+ CApp = CApp + "void Assign_%s_%s_CommandLine_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.BaseDatumType)
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+
+ pcddefaultvalue = Pcd.PcdValueFromComm
+ for FieldList in [pcddefaultvalue, Pcd.PcdFieldValueFromComm]:
+ if not FieldList:
+ continue
+ if pcddefaultvalue and FieldList == pcddefaultvalue:
+ IsArray = _IsFieldValueAnArray(FieldList)
+ if IsArray:
+ try:
+ FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
+ except BadExpression:
+ EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from Command: %s" %
+ (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
+ Value, ValueSize = ParseFieldValue (FieldList)
+
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd = %s; // From Command Line \n' % (Value)
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' Value = %s; // From Command Line.\n' % (DscBuildData.IntToCString(Value, ValueSize))
+ CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
+ continue
+ for FieldName in FieldList:
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
+ if IsArray:
+ try:
+ FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+                    except Exception:
+                        print("Unexpected error while parsing the value of %s.%s.%s" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName))
+ try:
+ Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
+ except Exception:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
+ CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' memcpy (&Pcd->%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (FieldName, ValueSize, ValueSize)
+ else:
+ if '[' in FieldName and ']' in FieldName:
+ Index = int(FieldName.split('[')[1].split(']')[0])
+ CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
+ if ValueSize > 4:
+ CApp = CApp + ' Pcd->%s = %dULL; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ else:
+ CApp = CApp + ' Pcd->%s = %d; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + "}\n"
+ return CApp
+
+ def GenerateModuleScopeValue(self, Pcd):
+ CApp = "// Value in Dsc Module scope \n"
+ for ModuleGuid in Pcd.PcdFiledValueFromDscComponent:
+
+ CApp = CApp + "void Assign_%s_%s_%s_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, ModuleGuid,Pcd.BaseDatumType)
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+ pcddefaultvalue, file_path,lineNo = Pcd.PcdValueFromComponents.get(ModuleGuid,(None,None,None))
+
+            if pcddefaultvalue:
+                FieldList = pcddefaultvalue
+                IsArray = _IsFieldValueAnArray(FieldList)
+                if IsArray:
+                    try:
+                        FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
+ except BadExpression:
+ EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from %s Line %s: %s" %
+ (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, file_path, lineNo, FieldList))
+ Value, ValueSize = ParseFieldValue (FieldList)
+
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd = %s; // From %s Line %s \n' % (Value, file_path, lineNo)
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' Value = %s; // From %s Line %s.\n' % (DscBuildData.IntToCString(Value, ValueSize), file_path, lineNo)
+ CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
+
+
+ PcdFiledValue = Pcd.PcdFiledValueFromDscComponent.get(ModuleGuid)
+ for index in PcdFiledValue:
+ FieldList = PcdFiledValue[index]
+ if not FieldList:
+ continue
+ for FieldName in FieldList:
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
+ if IsArray:
+ try:
+ FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+                        except Exception:
+                            print("Unexpected error while parsing the value of %s.%s.%s" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName))
+ try:
+ Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
+ except Exception:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
+ CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' memcpy (&Pcd->%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (FieldName, ValueSize, ValueSize)
+ else:
+ if '[' in FieldName and ']' in FieldName:
+ Index = int(FieldName.split('[')[1].split(']')[0])
+ CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
+ if ValueSize > 4:
+ CApp = CApp + ' Pcd->%s = %dULL; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ else:
+ CApp = CApp + ' Pcd->%s = %d; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + "}\n"
+ return CApp
+
+ @staticmethod
+ def GenerateCommandLineValueStatement(Pcd):
+ CApp = ' Assign_%s_%s_CommandLine_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ return CApp
+ def GenerateFdfValue(self,Pcd):
+ CApp = "// Value in Fdf\n"
+ CApp = CApp + "void Assign_%s_%s_Fdf_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.BaseDatumType)
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+
+ pcddefaultvalue = Pcd.PcdValueFromFdf
+ for FieldList in [pcddefaultvalue,Pcd.PcdFieldValueFromFdf]:
+ if not FieldList:
+ continue
+ if pcddefaultvalue and FieldList == pcddefaultvalue:
+ IsArray = _IsFieldValueAnArray(FieldList)
+ if IsArray:
+ try:
+ FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
+ except BadExpression:
+ EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from Fdf: %s" %
+ (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
+ Value, ValueSize = ParseFieldValue (FieldList)
+
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd = %s; // From Fdf \n' % (Value)
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+                CApp = CApp + ' Value = %s; // From Fdf.\n' % (DscBuildData.IntToCString(Value, ValueSize))
+ CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
+ continue
+ for FieldName in FieldList:
+ IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
+ if IsArray:
+ try:
+ FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
+ except BadExpression:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
+ (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
+                    except Exception:
+                        print("Unexpected error while parsing the value of %s.%s.%s" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName))
+ try:
+ Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
+ except Exception:
+ EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName,FieldName)),FieldList[FieldName][1], FieldList[FieldName][2]))
+ if isinstance(Value, str):
+ CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ elif IsArray:
+ #
+ # Use memcpy() to copy value into field
+ #
+ CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
+ CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + ' memcpy (&Pcd->%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (FieldName, ValueSize, ValueSize)
+ else:
+ if '[' in FieldName and ']' in FieldName:
+ Index = int(FieldName.split('[')[1].split(']')[0])
+ CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
+ if ValueSize > 4:
+ CApp = CApp + ' Pcd->%s = %dULL; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ else:
+ CApp = CApp + ' Pcd->%s = %d; // From %s Line %s Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
+ CApp = CApp + "}\n"
+ return CApp
+
+ @staticmethod
+ def GenerateFdfValueStatement(Pcd):
+ CApp = ' Assign_%s_%s_Fdf_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ return CApp
+
+ @staticmethod
+ def GenerateModuleValueStatement(module_guid, Pcd):
+ CApp = " Assign_%s_%s_%s_Value(Pcd);\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, module_guid)
+ return CApp
+ def GenerateModuleScopeInitializeFunc(self,SkuName, Pcd, InitByteValue, CApp):
+ for module_guid in Pcd.PcdFiledValueFromDscComponent:
+ CApp = CApp + 'void\n'
+ CApp = CApp + 'Initialize_%s_%s_%s_%s(\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ CApp = CApp + ' void\n'
+ CApp = CApp + ' )\n'
+ CApp = CApp + '{\n'
+ CApp = CApp + ' UINT32 Size;\n'
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+ CApp = CApp + ' UINT32 OriginalSize;\n'
+ CApp = CApp + ' VOID *OriginalPcd;\n'
+
+ CApp = CApp + ' %s *Pcd; // From %s Line %d \n' % (Pcd.BaseDatumType,Pcd.PkgPath, Pcd.PcdDefineLineNo)
+
+ CApp = CApp + '\n'
+
+ PcdDefaultValue = StringToArray(Pcd.DefaultValueFromDec.strip())
+ InitByteValue += '%s.%s.%s.%s|%s|%s\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DatumType, PcdDefaultValue)
+ #
+ # Get current PCD value and size
+ #
+ CApp = CApp + ' OriginalPcd = PcdGetPtr (%s, %s, %s, %s, &OriginalSize);\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+
+ #
+ # Determine the size of the PCD. For simple structures, sizeof(TYPE) provides
+ # the correct value. For structures with a flexible array member, the flexible
+ # array member is detected, and the size is based on the highest index used with
+ # the flexible array member. The flexible array member must be the last field
+ # in a structure. The size formula for this case is:
+            #  OFFSET_OF(FlexibleArrayField) + sizeof(FlexibleArray[0]) * (HighestIndex + 1)
+ #
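+            # Worked example (assumed layout): for
+            #   typedef struct { UINT32 Hdr; UINT8 Buf[]; } MY_PCD;
+            # with highest assignment Buf[5], the size evaluates to
+            #   OFFSET_OF(Buf) + sizeof(Buf[0]) * (5 + 1) = 4 + 1 * 6 = 10 bytes.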
+ CApp = CApp + DscBuildData.GenerateSizeStatments(Pcd,SkuName,TAB_DEFAULT_STORES_DEFAULT)
+ if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
+ CApp = CApp + ' OriginalSize = OriginalSize < sizeof(%s) * %d? OriginalSize:sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize(),Pcd.BaseDatumType,Pcd.PcdArraySize())
+ CApp = CApp + ' Size = sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize())
+
+ #
+ # Allocate and zero buffer for the PCD
+ # Must handle cases where current value is smaller, larger, or same size
+            # Always keep the larger of the two as the current size
+ #
+ CApp = CApp + ' Size = (OriginalSize > Size ? OriginalSize : Size);\n'
+ CApp = CApp + ' Pcd = (%s *)malloc (Size);\n' % (Pcd.BaseDatumType,)
+ CApp = CApp + ' memset (Pcd, 0, Size);\n'
+
+ #
+ # Copy current PCD value into allocated buffer.
+ #
+ CApp = CApp + ' memcpy (Pcd, OriginalPcd, OriginalSize);\n'
+
+ #
+ # Assign field values in PCD
+ #
+ CApp = CApp + DscBuildData.GenerateDefaultValueAssignStatement(Pcd)
+
+ CApp = CApp + "// SkuName: %s, DefaultStoreName: STANDARD \n" % self.SkuIdMgr.SystemSkuId
+ CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
+ CApp = CApp + DscBuildData.GenerateModuleValueStatement(module_guid,Pcd)
+ CApp = CApp + DscBuildData.GenerateFdfValueStatement(Pcd)
+ CApp = CApp + DscBuildData.GenerateCommandLineValueStatement(Pcd)
+
+ #
+ # Set new PCD value and size
+ #
+ CApp = CApp + ' PcdSetPtr (%s, %s, %s, %s, Size, (void *)Pcd);\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+
+ #
+ # Free PCD
+ #
+ CApp = CApp + ' free (Pcd);\n'
+ CApp = CApp + '}\n'
+ CApp = CApp + '\n'
+ return InitByteValue,CApp
+
+ def GenerateInitializeFunc(self, SkuName, DefaultStore, Pcd, InitByteValue, CApp):
+ OverrideValues = {DefaultStore:{}}
+ if Pcd.SkuOverrideValues:
+ OverrideValues = Pcd.SkuOverrideValues[SkuName]
+ if not OverrideValues:
+ OverrideValues = {TAB_DEFAULT_STORES_DEFAULT:Pcd.DefaultValues}
+ for DefaultStoreName in OverrideValues:
+ CApp = CApp + 'void\n'
+ CApp = CApp + 'Initialize_%s_%s_%s_%s(\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ CApp = CApp + ' void\n'
+ CApp = CApp + ' )\n'
+ CApp = CApp + '{\n'
+ CApp = CApp + ' UINT32 Size;\n'
+ CApp = CApp + ' UINT32 FieldSize;\n'
+ CApp = CApp + ' CHAR8 *Value;\n'
+ CApp = CApp + ' UINT32 OriginalSize;\n'
+ CApp = CApp + ' VOID *OriginalPcd;\n'
+
+ CApp = CApp + ' %s *Pcd; // From %s Line %d \n' % (Pcd.BaseDatumType,Pcd.PkgPath, Pcd.PcdDefineLineNo)
+
+ CApp = CApp + '\n'
+
+ PcdDefaultValue = StringToArray(Pcd.DefaultValueFromDec.strip())
+
+ InitByteValue += '%s.%s.%s.%s|%s|%s\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DatumType, PcdDefaultValue)
+
+ #
+ # Get current PCD value and size
+ #
+ CApp = CApp + ' OriginalPcd = PcdGetPtr (%s, %s, %s, %s, &OriginalSize);\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+
+ #
+ # Determine the size of the PCD. For simple structures, sizeof(TYPE) provides
+ # the correct value. For structures with a flexible array member, the flexible
+ # array member is detected, and the size is based on the highest index used with
+ # the flexible array member. The flexible array member must be the last field
+ # in a structure. The size formula for this case is:
+            #  OFFSET_OF(FlexibleArrayField) + sizeof(FlexibleArray[0]) * (HighestIndex + 1)
+ #
+ CApp = CApp + DscBuildData.GenerateSizeStatments(Pcd,SkuName,DefaultStoreName)
+ if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
+ CApp = CApp + ' OriginalSize = OriginalSize < sizeof(%s) * %d? OriginalSize:sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize(),Pcd.BaseDatumType,Pcd.PcdArraySize())
+ CApp = CApp + ' Size = sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize())
+
+ #
+ # Allocate and zero buffer for the PCD
+ # Must handle cases where current value is smaller, larger, or same size
+            # Always keep the larger of the two as the current size
+ #
+ CApp = CApp + ' Size = (OriginalSize > Size ? OriginalSize : Size);\n'
+ CApp = CApp + ' Pcd = (%s *)malloc (Size);\n' % (Pcd.BaseDatumType,)
+ CApp = CApp + ' memset (Pcd, 0, Size);\n'
+
+ #
+ # Copy current PCD value into allocated buffer.
+ #
+ CApp = CApp + ' memcpy (Pcd, OriginalPcd, OriginalSize);\n'
+
+ #
+ # Assign field values in PCD
+ #
+ CApp = CApp + DscBuildData.GenerateDefaultValueAssignStatement(Pcd)
+ if Pcd.Type not in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
+ for skuname in self.SkuIdMgr.GetSkuChain(SkuName):
+ storeset = [DefaultStoreName] if DefaultStoreName == TAB_DEFAULT_STORES_DEFAULT else [TAB_DEFAULT_STORES_DEFAULT, DefaultStoreName]
+ for defaultstorenameitem in storeset:
+ CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (skuname, defaultstorenameitem)
+ CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, skuname, defaultstorenameitem)
+ if skuname == SkuName:
+ break
+ else:
+ CApp = CApp + "// SkuName: %s, DefaultStoreName: STANDARD \n" % self.SkuIdMgr.SystemSkuId
+ CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
+ CApp = CApp + DscBuildData.GenerateFdfValueStatement(Pcd)
+ CApp = CApp + DscBuildData.GenerateCommandLineValueStatement(Pcd)
+ #
+ # Set new PCD value and size
+ #
+ CApp = CApp + ' PcdSetPtr (%s, %s, %s, %s, Size, (void *)Pcd);\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+
+ #
+ # Free PCD
+ #
+ CApp = CApp + ' free (Pcd);\n'
+ CApp = CApp + '}\n'
+ CApp = CApp + '\n'
+ return InitByteValue, CApp
+
+ def GenerateArrayAssignment(self, Pcd):
+ CApp = ""
+ if not Pcd:
+ return CApp
+ Dimension = ""
+ for d in Pcd.Capacity:
+ Dimension += "[]"
+
+ Value = Pcd.DefaultValueFromDec
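+ # A value such as {CODE({0x1, 0x2})} (illustrative) is unwrapped to its raw
+ # C initializer and emitted below as a static array definition.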
+ if "{CODE(" in Pcd.DefaultValueFromDec:
+ realvalue = Pcd.DefaultValueFromDec.strip()[6:-2] # strip the leading "{CODE(" and trailing ")}"
+ CApp += "static %s %s_%s_INIT_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,Dimension,realvalue)
+
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET | PCD_DYNAMIC_EX_TYPE_SET:
+ for skuname in Pcd.SkuInfoList:
+ skuinfo = Pcd.SkuInfoList[skuname]
+ if skuinfo.VariableName:
+ for defaultstore in skuinfo.DefaultStoreDict:
+ pcddscrawdefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, skuname, defaultstore)
+ if pcddscrawdefaultvalue:
+ Value = skuinfo.DefaultStoreDict[defaultstore]
+ if "{CODE(" in Value:
+ realvalue = Value.strip()[6:-2] # strip the leading "{CODE(" and trailing ")}"
+ CApp += "static %s %s_%s_%s_%s_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,defaultstore,Dimension,realvalue)
+ else:
+ pcddscrawdefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, skuname, TAB_DEFAULT_STORES_DEFAULT)
+ if pcddscrawdefaultvalue:
+ Value = skuinfo.DefaultValue
+ if "{CODE(" in Value:
+ realvalue = Value.strip()[6:-2] # strip the leading "{CODE(" and trailing ")}"
+ CApp += "static %s %s_%s_%s_%s_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,TAB_DEFAULT_STORES_DEFAULT,Dimension,realvalue)
+ else:
+ pcddscrawdefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT)
+ if pcddscrawdefaultvalue:
+ if "{CODE(" in Pcd.DefaultValue:
+ realvalue = Pcd.DefaultValue.strip()[6:-2] # strip the leading "{CODE(" and trailing ")}"
+ CApp += "static %s %s_%s_DEFAULT_STANDARD_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,Dimension,realvalue)
+
+ return CApp
+
+ def SkuOverrideValuesEmpty(self,OverrideValues):
+ if not OverrideValues:
+ return True
+ for key in OverrideValues:
+ if OverrideValues[key]:
+ return False
+ return True
+
+ def ParseCCFlags(self, ccflag):
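+ # Collect only define/undefine options (/D, /U, -D, -U), joining detached
+ # forms such as "/D FOO"; e.g. "/D FOO -DBAR=1 -O2" yields
+ # {"/D FOO", "-DBAR=1"} while -O2 is not collected (illustrative sketch).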
+ ccflags = set()
+ ccflaglist = ccflag.split(" ")
+ i = 0
+ while i < len(ccflaglist):
+ item = ccflaglist[i].strip()
+ if item in (r"/D", r"/U","-D","-U"):
+ ccflags.add(" ".join((ccflaglist[i],ccflaglist[i+1])))
+ i = i+1
+ elif item.startswith((r"/D", r"/U","-D","-U")):
+ ccflags.add(item)
+ i +=1
+ return ccflags
+ def GenerateByteArrayValue (self, StructuredPcds):
+ #
+ # Generate/Compile/Run C application to determine if there are any flexible array members
+ #
+ if not StructuredPcds:
+ return
+
+ InitByteValue = ""
+ CApp = PcdMainCHeader
+
+ IncludeFiles = set()
+ for PcdName in StructuredPcds:
+ Pcd = StructuredPcds[PcdName]
+ for IncludeFile in Pcd.StructuredPcdIncludeFile:
+ if IncludeFile not in IncludeFiles:
+ IncludeFiles.add(IncludeFile)
+ CApp = CApp + '#include <%s>\n' % (IncludeFile)
+ CApp = CApp + '\n'
+ for Pcd in StructuredPcds.values():
+ CApp = CApp + self.GenerateArrayAssignment(Pcd)
+ for PcdName in sorted(StructuredPcds.keys()):
+ Pcd = StructuredPcds[PcdName]
+
+ #create void Cal_token_cname_Size functions
+ CApp = CApp + self.GenerateSizeFunction(Pcd)
+
+ #create void Assign_ functions
+
+ # From DEC
+ CApp = CApp + self.GenerateDefaultValueAssignFunction(Pcd)
+ # From Fdf
+ CApp = CApp + self.GenerateFdfValue(Pcd)
+ # From CommandLine
+ CApp = CApp + self.GenerateCommandLineValue(Pcd)
+
+ # From Dsc Global setting
+ if self.SkuOverrideValuesEmpty(Pcd.SkuOverrideValues) or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
+ CApp = CApp + self.GenerateInitValueFunction(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
+ else:
+ for SkuName in self.SkuIdMgr.SkuOverrideOrder():
+ if SkuName not in Pcd.SkuOverrideValues:
+ continue
+ for DefaultStoreName in Pcd.SkuOverrideValues[SkuName]:
+ CApp = CApp + self.GenerateInitValueFunction(Pcd, SkuName, DefaultStoreName)
+
+ # From Dsc module scope setting
+ CApp = CApp + self.GenerateModuleScopeValue(Pcd)
+
+ #create Initialize_ functions
+ if self.SkuOverrideValuesEmpty(Pcd.SkuOverrideValues) or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
+ InitByteValue, CApp = self.GenerateInitializeFunc(self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd, InitByteValue, CApp)
+ InitByteValue, CApp = self.GenerateModuleScopeInitializeFunc(self.SkuIdMgr.SystemSkuId,Pcd,InitByteValue,CApp)
+ else:
+ for SkuName in self.SkuIdMgr.SkuOverrideOrder():
+ if SkuName not in Pcd.SkuOverrideValues:
+ continue
+ for DefaultStoreName in Pcd.DefaultStoreName:
+ Pcd = StructuredPcds[PcdName]
+ InitByteValue, CApp = self.GenerateInitializeFunc(SkuName, DefaultStoreName, Pcd, InitByteValue, CApp)
+
+ CApp = CApp + 'VOID\n'
+ CApp = CApp + 'PcdEntryPoint(\n'
+ CApp = CApp + ' VOID\n'
+ CApp = CApp + ' )\n'
+ CApp = CApp + '{\n'
+ for Pcd in StructuredPcds.values():
+ if self.SkuOverrideValuesEmpty(Pcd.SkuOverrideValues) or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
+ CApp = CApp + ' Initialize_%s_%s_%s_%s();\n' % (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ for ModuleGuid in Pcd.PcdFiledValueFromDscComponent:
+ CApp += " Initialize_%s_%s_%s_%s();\n" % (ModuleGuid,TAB_DEFAULT_STORES_DEFAULT ,Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ else:
+ for SkuName in self.SkuIdMgr.SkuOverrideOrder():
+ if SkuName not in self.SkuIdMgr.AvailableSkuIdSet:
+ continue
+ for DefaultStoreName in Pcd.SkuOverrideValues[SkuName]:
+ CApp = CApp + ' Initialize_%s_%s_%s_%s();\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ CApp = CApp + '}\n'
+
+ CApp = CApp + PcdMainCEntry + '\n'
+
+ if not os.path.exists(self.OutputPath):
+ os.makedirs(self.OutputPath)
+ CAppBaseFileName = os.path.join(self.OutputPath, PcdValueInitName)
+ SaveFileOnChange(CAppBaseFileName + '.c', CApp, False)
+
+ # start generating makefile
+ MakeApp = PcdMakefileHeader
+ if sys.platform == "win32":
+ MakeApp = MakeApp + 'APPFILE = %s\%s.exe\n' % (self.OutputPath, PcdValueInitName) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s\%s.obj %s.obj\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + 'INC = '
+ else:
+ MakeApp = MakeApp + PcdGccMakefile
+ MakeApp = MakeApp + 'APPFILE = %s/%s\n' % (self.OutputPath, PcdValueInitName) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s/%s.o %s.o\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + \
+ 'include $(MAKEROOT)/Makefiles/app.makefile\n' + 'TOOL_INCLUDE +='
+
+ IncSearchList = []
+ PlatformInc = OrderedDict()
+ for Cache in self._Bdb._CACHE_.values():
+ if Cache.MetaFile.Ext.lower() != '.dec':
+ continue
+ if Cache.Includes:
+ if str(Cache.MetaFile.Path) not in PlatformInc:
+ PlatformInc[str(Cache.MetaFile.Path)] = []
+ PlatformInc[str(Cache.MetaFile.Path)].append (os.path.dirname(Cache.MetaFile.Path))
+ PlatformInc[str(Cache.MetaFile.Path)].extend (Cache.CommonIncludes)
+
+ PcdDependDEC = []
+ for Pcd in StructuredPcds.values():
+ for PackageDec in Pcd.PackageDecs:
+ Package = os.path.normpath(mws.join(GlobalData.gWorkspace, PackageDec))
+ if not os.path.exists(Package):
+ EdkLogger.error('Build', RESOURCE_NOT_AVAILABLE, "The dependent Package %s of PCD %s.%s does not exist." % (PackageDec, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if Package not in PcdDependDEC:
+ PcdDependDEC.append(Package)
+
+ if PlatformInc and PcdDependDEC:
+ for pkg in PcdDependDEC:
+ if pkg in PlatformInc:
+ for inc in PlatformInc[pkg]:
+ #
+ # Get list of files in potential -I include path
+ #
+ FileList = os.listdir (str(inc))
+ #
+ # Skip -I include path if one of the include files required
+ # by PcdValueInit.c are present in the include paths from
+ # the DEC file. PcdValueInit.c must use the standard include
+ # files from the host compiler.
+ #
+ if 'stdio.h' in FileList:
+ continue
+ if 'stdlib.h' in FileList:
+ continue
+ if 'string.h' in FileList:
+ continue
+ MakeApp += '-I' + str(inc) + ' '
+ IncSearchList.append(inc)
+ MakeApp = MakeApp + '\n'
+
+ CC_FLAGS = LinuxCFLAGS
+ if sys.platform == "win32":
+ CC_FLAGS = WindowsCFLAGS
+ BuildOptions = OrderedDict()
+ for Options in self.BuildOptions:
+ if Options[2] != EDKII_NAME:
+ continue
+ Family = Options[0]
+ if Family and Family != self.ToolChainFamily:
+ continue
+ Target, Tag, Arch, Tool, Attr = Options[1].split("_")
+ if Tool != 'CC':
+ continue
+ if Attr != "FLAGS":
+ continue
+ if Target == TAB_STAR or Target == self._Target:
+ if Tag == TAB_STAR or Tag == self._Toolchain:
+ if 'COMMON' not in BuildOptions:
+ BuildOptions['COMMON'] = set()
+ if Arch == TAB_STAR:
+ BuildOptions['COMMON']|= self.ParseCCFlags(self.BuildOptions[Options])
+ if Arch in self.SupArchList:
+ if Arch not in BuildOptions:
+ BuildOptions[Arch] = set()
+ BuildOptions[Arch] |= self.ParseCCFlags(self.BuildOptions[Options])
+
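+ # Fold per-arch CC flags into COMMON: a single arch is merged directly, while
+ # multiple arches contribute only the flags common to all of them; of these,
+ # only the define/undefine options (/D, /U, -D, -U) are appended to CC_FLAGS.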
+ if BuildOptions:
+ ArchBuildOptions = {arch:flags for arch,flags in BuildOptions.items() if arch != 'COMMON'}
+ if len(ArchBuildOptions.keys()) == 1:
+ BuildOptions['COMMON'] |= (list(ArchBuildOptions.values())[0])
+ elif len(ArchBuildOptions.keys()) > 1:
+ CommonBuildOptions = reduce(lambda x,y: x&y, ArchBuildOptions.values())
+ BuildOptions['COMMON'] |= CommonBuildOptions
+ ValueList = [item for item in BuildOptions['COMMON'] if item.startswith((r"/U","-U"))]
+ ValueList.extend([item for item in BuildOptions['COMMON'] if item.startswith((r"/D", "-D"))])
+ CC_FLAGS += " ".join(ValueList)
+ MakeApp += CC_FLAGS
+
+ if sys.platform == "win32":
+ MakeApp = MakeApp + PcdMakefileEnd
+ MakeApp = MakeApp + AppTarget % ("""\tcopy $(APPLICATION) $(APPFILE) /y """)
+ else:
+ MakeApp = MakeApp + AppTarget % ("""\tcp $(APPLICATION) $(APPFILE) """)
+ MakeApp = MakeApp + '\n'
+ IncludeFileFullPaths = []
+ for includefile in IncludeFiles:
+ for includepath in IncSearchList:
+ includefullpath = os.path.join(str(includepath), includefile)
+ if os.path.exists(includefullpath):
+ IncludeFileFullPaths.append(os.path.normpath(includefullpath))
+ break
+ SearchPathList = []
+ SearchPathList.append(os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "BaseTools/Source/C/Include")))
+ SearchPathList.append(os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "BaseTools/Source/C/Common")))
+ SearchPathList.extend(str(item) for item in IncSearchList)
+ IncFileList = GetDependencyList(IncludeFileFullPaths, SearchPathList)
+ for include_file in IncFileList:
+ MakeApp += "$(OBJECTS) : %s\n" % include_file
+ if sys.platform == "win32":
+ PcdValueCommonPath = os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "Source\C\Common\PcdValueCommon.c"))
+ MakeApp = MakeApp + '%s\PcdValueCommon.c : %s\n' % (self.OutputPath, PcdValueCommonPath)
+ MakeApp = MakeApp + '\tcopy /y %s $@\n' % (PcdValueCommonPath)
+ else:
+ PcdValueCommonPath = os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "Source/C/Common/PcdValueCommon.c"))
+ MakeApp = MakeApp + '%s/PcdValueCommon.c : %s\n' % (self.OutputPath, PcdValueCommonPath)
+ MakeApp = MakeApp + '\tcp -f %s %s/PcdValueCommon.c\n' % (PcdValueCommonPath, self.OutputPath)
+ MakeFileName = os.path.join(self.OutputPath, 'Makefile')
+ MakeApp += "$(OBJECTS) : %s\n" % MakeFileName
+ SaveFileOnChange(MakeFileName, MakeApp, False)
+
+ # start generating input file
+ InputValueFile = os.path.join(self.OutputPath, 'Input.txt')
+ OutputValueFile = os.path.join(self.OutputPath, 'Output.txt')
+ SaveFileOnChange(InputValueFile, InitByteValue, False)
+
+ Dest_PcdValueInitExe = PcdValueInitName
+ if sys.platform != "win32":
+ Dest_PcdValueInitExe = os.path.join(self.OutputPath, PcdValueInitName)
+ else:
+ Dest_PcdValueInitExe = os.path.join(self.OutputPath, PcdValueInitName) +".exe"
+
+ #start building the structure pcd value tool
+ Messages = ''
+ if sys.platform == "win32":
+ MakeCommand = 'nmake -f %s' % (MakeFileName)
+ returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (MakeCommand)
+ Messages = StdOut
+ else:
+ MakeCommand = 'make -f %s' % (MakeFileName)
+ returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (MakeCommand)
+ Messages = StdErr
+
+ EdkLogger.verbose ('%s\n%s\n%s' % (MakeCommand, StdOut, StdErr))
+ Messages = Messages.split('\n')
+ MessageGroup = []
+ if returncode != 0:
+ CAppBaseFileName = os.path.join(self.OutputPath, PcdValueInitName)
+ File = open (CAppBaseFileName + '.c', 'r')
+ FileData = File.readlines()
+ File.close()
+ for Message in Messages:
+ if " error" in Message or "warning" in Message:
+ try:
+ FileInfo = Message.strip().split('(')
+ if len (FileInfo) > 1:
+ FileName = FileInfo [0]
+ FileLine = FileInfo [1].split (')')[0]
+ else:
+ FileInfo = Message.strip().split(':')
+ if len(FileInfo) < 2:
+ continue
+ FileName = FileInfo [0]
+ FileLine = FileInfo [1]
+ except:
+ continue
+ if "PcdValueInit.c" not in FileName:
+ continue
+ if FileLine.isdigit():
+ error_line = FileData[int (FileLine) - 1]
+ if r"//" in error_line:
+ c_line, dsc_line = error_line.split(r"//")
+ else:
+ dsc_line = error_line
+ message_items = Message.split(":")
+ Index = 0
+ if "PcdValueInit.c" not in Message:
+ if not MessageGroup:
+ MessageGroup.append(Message)
+ break
+ else:
+ for item in message_items:
+ if "PcdValueInit.c" in item:
+ Index = message_items.index(item)
+ message_items[Index] = dsc_line.strip()
+ break
+ MessageGroup.append(":".join(message_items[Index:]).strip())
+ continue
+ else:
+ MessageGroup.append(Message)
+ if MessageGroup:
+ EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "\n".join(MessageGroup) )
+ else:
+ EdkLogger.error('Build', COMMAND_FAILURE, 'Cannot execute command: %s\n%s\n%s' % (MakeCommand, StdOut, StdErr))
+
+ #start executing the structure pcd value tool
+ if DscBuildData.NeedUpdateOutput(OutputValueFile, Dest_PcdValueInitExe, InputValueFile):
+ Command = Dest_PcdValueInitExe + ' -i %s -o %s' % (InputValueFile, OutputValueFile)
+ returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (Command)
+ EdkLogger.verbose ('%s\n%s\n%s' % (Command, StdOut, StdErr))
+ if returncode != 0:
+ EdkLogger.warn('Build', COMMAND_FAILURE, 'Cannot collect output from command: %s\n%s\n' % (Command, StdOut, StdErr))
+
+ #start update structure pcd final value
+ File = open (OutputValueFile, 'r')
+ FileBuffer = File.readlines()
+ File.close()
+
+ StructurePcdSet = []
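+ # Each output line mirrors the Input.txt record form
+ # "<Sku>.<Store>.<Guid>.<Name>|<Type>|<Value>" and is split back into an
+ # identifying tuple plus the computed value string (sketch of the format).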
+ for Pcd in FileBuffer:
+ PcdValue = Pcd.split ('|')
+ PcdInfo = PcdValue[0].split ('.')
+ StructurePcdSet.append((PcdInfo[0], PcdInfo[1], PcdInfo[2], PcdInfo[3], PcdValue[2].strip()))
+ return StructurePcdSet
+
+ @staticmethod
+ def NeedUpdateOutput(OutputFile, ValueCFile, StructureInput):
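+ # A re-run is needed when the output file is missing, or when it is not
+ # strictly newer than both the generated tool and its input file.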
+ if not os.path.exists(OutputFile):
+ return True
+ if os.stat(OutputFile).st_mtime <= os.stat(ValueCFile).st_mtime:
+ return True
+ if os.stat(OutputFile).st_mtime <= os.stat(StructureInput).st_mtime:
+ return True
+ return False
+
+ ## Retrieve dynamic PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object containing settings of the given PCD type
+ #
+ def _GetDynamicPcd(self, Type):
+
+
+ Pcds = OrderedDict()
+ #
+ # tdict is a special kind of dict, used for selecting the correct
+ # PCD settings for a certain ARCH and SKU
+ #
+ PcdDict = tdict(True, 4)
+ PcdList = []
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = self._RawData[Type, self._Arch]
+ AvailableSkuIdSet = copy.copy(self.SkuIds)
+
+
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
+ SkuName = SkuName.upper()
+ SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
+ if SkuName not in AvailableSkuIdSet:
+ EdkLogger.error('build', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
+ File=self.MetaFile, Line=Dummy5)
+ if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, Dummy5) not in PcdList:
+ PcdList.append((PcdCName, TokenSpaceGuid, SkuName, Dummy5))
+ PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
+
+ # Remove redundant PCD candidates, per the ARCH and SKU
+ for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
+
+ Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
+ if Setting is None:
+ continue
+
+ PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
+ if MaxDatumSize:
+ if int(MaxDatumSize, 0) > 0xFFFF:
+ EdkLogger.error('build', FORMAT_INVALID, "The size value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)),
+ File=self.MetaFile, Line=Dummy4)
+ if int(MaxDatumSize, 0) < 0:
+ EdkLogger.error('build', FORMAT_INVALID, "The size value can't be set to a negative value for %s." % ".".join((TokenSpaceGuid, PcdCName)),
+ File=self.MetaFile, Line=Dummy4)
+ SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], '', '', '', '', '', PcdValue)
+ if (PcdCName, TokenSpaceGuid) in Pcds:
+ pcdObject = Pcds[PcdCName, TokenSpaceGuid]
+ pcdObject.SkuInfoList[SkuName] = SkuInfo
+ if MaxDatumSize.strip():
+ CurrentMaxSize = int(MaxDatumSize.strip(), 0)
+ else:
+ CurrentMaxSize = 0
+ if pcdObject.MaxDatumSize:
+ PcdMaxSize = int(pcdObject.MaxDatumSize, 0)
+ else:
+ PcdMaxSize = 0
+ if CurrentMaxSize > PcdMaxSize:
+ pcdObject.MaxDatumSize = str(CurrentMaxSize)
+ else:
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ DatumType,
+ PcdValue,
+ '',
+ MaxDatumSize,
+ OrderedDict({SkuName : SkuInfo}),
+ False,
+ None,
+ IsDsc=True)
+
+ if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][TAB_DEFAULT_STORES_DEFAULT] = PcdValue
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][TAB_DEFAULT_STORES_DEFAULT] = (self.MetaFile.File,Dummy4)
+
+ for pcd in Pcds.values():
+ pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
+ # Only fix the value while no value provided in DSC file.
+ for sku in pcd.SkuInfoList.values():
+ if not sku.DefaultValue:
+ sku.DefaultValue = pcdDecObject.DefaultValue
+ if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
+ valuefromDec = pcdDecObject.DefaultValue
+ SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', '', '', '', '', '', valuefromDec)
+ pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
+ elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
+ del pcd.SkuInfoList[TAB_COMMON]
+ elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ del pcd.SkuInfoList[TAB_COMMON]
+
+ list(map(self.FilterSkuSettings, Pcds.values()))
+
+ return Pcds
+
+ def FilterSkuSettings(self, PcdObj):
+
+ if self.SkuIdMgr.SkuUsageType == self.SkuIdMgr.SINGLE:
+ if TAB_DEFAULT in PcdObj.SkuInfoList and self.SkuIdMgr.SystemSkuId not in PcdObj.SkuInfoList:
+ PcdObj.SkuInfoList[self.SkuIdMgr.SystemSkuId] = PcdObj.SkuInfoList[TAB_DEFAULT]
+ PcdObj.SkuInfoList = {TAB_DEFAULT:PcdObj.SkuInfoList[self.SkuIdMgr.SystemSkuId]}
+ PcdObj.SkuInfoList[TAB_DEFAULT].SkuIdName = TAB_DEFAULT
+ PcdObj.SkuInfoList[TAB_DEFAULT].SkuId = '0'
+
+ elif self.SkuIdMgr.SkuUsageType == self.SkuIdMgr.DEFAULT:
+ PcdObj.SkuInfoList = {TAB_DEFAULT:PcdObj.SkuInfoList[TAB_DEFAULT]}
+
+ return PcdObj
+
+ @staticmethod
+ def CompareVarAttr(Attr1, Attr2):
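+ # Attributes are compared as sets of comma-separated tokens, so ordering
+ # and spacing do not matter: e.g. "NV, BS" equals "BS,NV" (illustrative).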
+ if not Attr1 or not Attr2: # for empty string
+ return True
+ Attr1s = [attr.strip() for attr in Attr1.split(",")]
+ Attr1Set = set(Attr1s)
+ Attr2s = [attr.strip() for attr in Attr2.split(",")]
+ Attr2Set = set(Attr2s)
+ if Attr2Set == Attr1Set:
+ return True
+ else:
+ return False
+
+ def CompletePcdValues(self, PcdSet):
+ Pcds = OrderedDict()
+ DefaultStoreObj = DefaultStore(self._GetDefaultStores())
+ SkuIds = {skuname:skuid for skuname, skuid in self.SkuIdMgr.AvailableSkuIdSet.items() if skuname != TAB_COMMON}
+ DefaultStores = set(storename for pcdobj in PcdSet.values() for skuobj in pcdobj.SkuInfoList.values() for storename in skuobj.DefaultStoreDict)
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ PcdObj = PcdSet[(PcdCName, TokenSpaceGuid)]
+
+ if PcdObj.Type not in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_DEFAULT],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_VPD],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_DEFAULT],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII],
+ self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_VPD]]:
+ Pcds[PcdCName, TokenSpaceGuid]= PcdObj
+ continue
+ PcdType = PcdObj.Type
+ if PcdType in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ for skuid in PcdObj.SkuInfoList:
+ skuobj = PcdObj.SkuInfoList[skuid]
+ mindefaultstorename = DefaultStoreObj.GetMin(set(defaultstorename for defaultstorename in skuobj.DefaultStoreDict))
+ for defaultstorename in DefaultStores:
+ if defaultstorename not in skuobj.DefaultStoreDict:
+ skuobj.DefaultStoreDict[defaultstorename] = skuobj.DefaultStoreDict[mindefaultstorename]
+ skuobj.HiiDefaultValue = skuobj.DefaultStoreDict[mindefaultstorename]
+ for skuname, skuid in SkuIds.items():
+ if skuname not in PcdObj.SkuInfoList:
+ nextskuid = self.SkuIdMgr.GetNextSkuId(skuname)
+ while nextskuid not in PcdObj.SkuInfoList:
+ nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
+ PcdObj.SkuInfoList[skuname] = copy.deepcopy(PcdObj.SkuInfoList[nextskuid])
+ PcdObj.SkuInfoList[skuname].SkuId = skuid
+ PcdObj.SkuInfoList[skuname].SkuIdName = skuname
+ if PcdType in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
+ PcdObj.DefaultValue = list(PcdObj.SkuInfoList.values())[0].HiiDefaultValue if self.SkuIdMgr.SkuUsageType == self.SkuIdMgr.SINGLE else PcdObj.SkuInfoList[TAB_DEFAULT].HiiDefaultValue
+ Pcds[PcdCName, TokenSpaceGuid]= PcdObj
+ return Pcds
+ ## Retrieve dynamic HII PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object containing settings of the given PCD type
+ #
+ def _GetDynamicHiiPcd(self, Type):
+
+ VariableAttrs = {}
+
+ Pcds = OrderedDict()
+ UserDefinedDefaultStores = []
+ #
+ # tdict is a special kind of dict, used for selecting the correct
+ # PCD settings for a certain ARCH and SKU
+ #
+ PcdDict = tdict(True, 5)
+ PcdList = []
+ RecordList = self._RawData[Type, self._Arch]
+ # Find out all possible PCD candidates for self._Arch
+ AvailableSkuIdSet = copy.copy(self.SkuIds)
+ DefaultStoresDefine = self._GetDefaultStores()
+
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, DefaultStore, Dummy4, Dummy5 in RecordList:
+ SkuName = SkuName.upper()
+ SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
+ DefaultStore = DefaultStore.upper()
+ if DefaultStore == TAB_COMMON:
+ DefaultStore = TAB_DEFAULT_STORES_DEFAULT
+ else:
+ #The end user defines [DefaultStores] and [SKUID_IDENTIFIER.Manufacturing] in the DSC
+ UserDefinedDefaultStores.append((PcdCName, TokenSpaceGuid))
+ if SkuName not in AvailableSkuIdSet:
+ EdkLogger.error('build', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
+ File=self.MetaFile, Line=Dummy5)
+ if DefaultStore not in DefaultStoresDefine:
+ EdkLogger.error('build', PARAMETER_INVALID, 'DefaultStores %s is not defined in [DefaultStores] section' % DefaultStore,
+ File=self.MetaFile, Line=Dummy5)
+ if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy5) not in PcdList:
+ PcdList.append((PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy5))
+ PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid, DefaultStore] = Setting
+
+
+ # Remove redundant PCD candidates, per the ARCH and SKU
+ for index,(PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy4) in enumerate(PcdList):
+
+ Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid, DefaultStore]
+ if Setting is None:
+ continue
+ VariableName, VariableGuid, VariableOffset, DefaultValue, VarAttribute = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
+
+ rt, Msg = VariableAttributes.ValidateVarAttributes(VarAttribute)
+ if not rt:
+ EdkLogger.error("build", PCD_VARIABLE_ATTRIBUTES_ERROR, "Variable attributes settings for %s is incorrect.\n %s" % (".".join((TokenSpaceGuid, PcdCName)), Msg),
+ ExtraData="[%s]" % VarAttribute)
+ ExceedMax = False
+ FormatCorrect = True
+ if VariableOffset.isdigit():
+ if int(VariableOffset, 10) > 0xFFFF:
+ ExceedMax = True
+ elif variablePattern.match(VariableOffset):
+ if int(VariableOffset, 16) > 0xFFFF:
+ ExceedMax = True
+ # For Offset written in "A.B"
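+ # e.g. an offset such as "0x04.0x01" (illustrative); both parts must be valid words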
+ elif VariableOffset.find('.') > -1:
+ VariableOffsetList = VariableOffset.split(".")
+ if not (len(VariableOffsetList) == 2
+ and IsValidWord(VariableOffsetList[0])
+ and IsValidWord(VariableOffsetList[1])):
+ FormatCorrect = False
+ else:
+ FormatCorrect = False
+ if not FormatCorrect:
+ EdkLogger.error('Build', FORMAT_INVALID, "The syntax or format of the variable offset value is invalid for %s." % ".".join((TokenSpaceGuid, PcdCName)))
+
+ if ExceedMax:
+ EdkLogger.error('Build', OPTION_VALUE_INVALID, "The variable offset value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)))
+ if (VariableName, VariableGuid) not in VariableAttrs:
+ VariableAttrs[(VariableName, VariableGuid)] = VarAttribute
+ else:
+ if not DscBuildData.CompareVarAttr(VariableAttrs[(VariableName, VariableGuid)], VarAttribute):
+ EdkLogger.error('Build', PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR, "The variable %s.%s for DynamicHii PCDs has conflicting attributes [%s] and [%s] " % (VariableGuid, VariableName, VarAttribute, VariableAttrs[(VariableName, VariableGuid)]))
+
+ pcdDecObject = self._DecPcds[PcdCName, TokenSpaceGuid]
+ if (PcdCName, TokenSpaceGuid) in Pcds:
+ pcdObject = Pcds[PcdCName, TokenSpaceGuid]
+ if SkuName in pcdObject.SkuInfoList:
+ Skuitem = pcdObject.SkuInfoList[SkuName]
+ Skuitem.DefaultStoreDict.update({DefaultStore:DefaultValue})
+ else:
+ SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute, DefaultStore={DefaultStore:DefaultValue})
+ pcdObject.SkuInfoList[SkuName] = SkuInfo
+ else:
+ SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute, DefaultStore={DefaultStore:DefaultValue})
+ PcdClassObj = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ '',
+ DefaultValue,
+ '',
+ '',
+ OrderedDict({SkuName : SkuInfo}),
+ False,
+ None,
+ pcdDecObject.validateranges,
+ pcdDecObject.validlists,
+ pcdDecObject.expressions,
+ IsDsc=True)
+ if (PcdCName, TokenSpaceGuid) in UserDefinedDefaultStores:
+ PcdClassObj.UserDefinedDefaultStoresFlag = True
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObj
+
+ Pcds[PcdCName, TokenSpaceGuid].CustomAttribute['DscPosition'] = index
+ if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][DefaultStore] = DefaultValue
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][DefaultStore] = (self.MetaFile.File,Dummy4)
+ for pcd in Pcds.values():
+ pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
+ pcd.DatumType = pcdDecObject.DatumType
+ # Only fix the value while no value provided in DSC file.
+ for sku in pcd.SkuInfoList.values():
+ if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue is None):
+ sku.HiiDefaultValue = pcdDecObject.DefaultValue
+ for default_store in sku.DefaultStoreDict:
+ sku.DefaultStoreDict[default_store]=pcdDecObject.DefaultValue
+ pcd.DefaultValue = pcdDecObject.DefaultValue
+ if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
+ SkuInfoObj = list(pcd.SkuInfoList.values())[0]
+ valuefromDec = pcdDecObject.DefaultValue
+ SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec, VariableAttribute=SkuInfoObj.VariableAttribute, DefaultStore={DefaultStore:valuefromDec})
+ pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
+ elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
+ del pcd.SkuInfoList[TAB_COMMON]
+ elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ del pcd.SkuInfoList[TAB_COMMON]
+
+ if pcd.MaxDatumSize.strip():
+ MaxSize = int(pcd.MaxDatumSize, 0)
+ else:
+ MaxSize = 0
+ if pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ for (_, skuobj) in pcd.SkuInfoList.items():
+ datalen = 0
+ skuobj.HiiDefaultValue = StringToArray(skuobj.HiiDefaultValue)
+ datalen = len(skuobj.HiiDefaultValue.split(","))
+ if datalen > MaxSize:
+ MaxSize = datalen
+ for defaultst in skuobj.DefaultStoreDict:
+ skuobj.DefaultStoreDict[defaultst] = StringToArray(skuobj.DefaultStoreDict[defaultst])
+ pcd.DefaultValue = StringToArray(pcd.DefaultValue)
+ pcd.MaxDatumSize = str(MaxSize)
+ rt, invalidhii = DscBuildData.CheckVariableNameAssignment(Pcds)
+ if not rt:
+ invalidpcd = ",".join(invalidhii)
+ EdkLogger.error('build', PCD_VARIABLE_INFO_ERROR, Message='The same HII PCD must map to the same EFI variable for all SKUs', File=self.MetaFile, ExtraData=invalidpcd)
+
+ list(map(self.FilterSkuSettings, Pcds.values()))
+
+ return Pcds
+
+ @staticmethod
+ def CheckVariableNameAssignment(Pcds):
+ invalidhii = []
+ for pcdname in Pcds:
+ pcd = Pcds[pcdname]
+ varnameset = set(sku.VariableName for (skuid, sku) in pcd.SkuInfoList.items())
+ if len(varnameset) > 1:
+ invalidhii.append(".".join((pcdname[1], pcdname[0])))
+ if len(invalidhii):
+ return False, invalidhii
+ else:
+ return True, []
+ ## Retrieve dynamic VPD PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object containing settings of the given PCD type
+ #
+ def _GetDynamicVpdPcd(self, Type):
+
+
+ Pcds = OrderedDict()
+ #
+ # tdict is a special kind of dict, used for selecting the correct
+ # PCD settings for a certain ARCH and SKU
+ #
+ PcdDict = tdict(True, 4)
+ PcdList = []
+
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = self._RawData[Type, self._Arch]
+ AvailableSkuIdSet = copy.copy(self.SkuIds)
+
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
+ SkuName = SkuName.upper()
+ SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
+ if SkuName not in AvailableSkuIdSet:
+ EdkLogger.error('build', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
+ File=self.MetaFile, Line=Dummy5)
+ if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, Dummy5) not in PcdList:
+ PcdList.append((PcdCName, TokenSpaceGuid, SkuName, Dummy5))
+ PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
+
+ # Remove redundant PCD candidates, per the ARCH and SKU
+ for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
+ Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
+ if Setting is None:
+ continue
+ #
+ # For the VOID* type, it can have optional data of MaxDatumSize and InitialValue.
+ # For the Integer & Boolean types, the optional data can only be InitialValue.
+ # At this point, we put all the data into the PcdClassObject because we don't
+ # know the PCD's datum type until the DEC parser has been called.
+ #
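+ # Illustrative DSC settings (a sketch assuming the usual VPD PCD syntax):
+ #   TokenSpaceGuid.PcdFoo|0x100|8|{0x00}    VOID*  : offset, max size, value
+ #   TokenSpaceGuid.PcdBar|0x200|0x5A        integer: offset, initial value
+ #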
+ VpdOffset, MaxDatumSize, InitialValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
+ if MaxDatumSize:
+ if int(MaxDatumSize, 0) > 0xFFFF:
+ EdkLogger.error('build', FORMAT_INVALID, "The size value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)),
+ File=self.MetaFile, Line=Dummy4)
+ if int(MaxDatumSize, 0) < 0:
+ EdkLogger.error('build', FORMAT_INVALID, "The size value can't be set to a negative value for %s." % ".".join((TokenSpaceGuid, PcdCName)),
+ File=self.MetaFile, Line=Dummy4)
+ SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], '', '', '', '', VpdOffset, InitialValue)
+ if (PcdCName, TokenSpaceGuid) in Pcds:
+ pcdObject = Pcds[PcdCName, TokenSpaceGuid]
+ pcdObject.SkuInfoList[SkuName] = SkuInfo
+ if MaxDatumSize.strip():
+ CurrentMaxSize = int(MaxDatumSize.strip(), 0)
+ else:
+ CurrentMaxSize = 0
+ if pcdObject.MaxDatumSize:
+ PcdMaxSize = int(pcdObject.MaxDatumSize, 0)
+ else:
+ PcdMaxSize = 0
+ if CurrentMaxSize > PcdMaxSize:
+ pcdObject.MaxDatumSize = str(CurrentMaxSize)
+ else:
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ '',
+ InitialValue,
+ '',
+ MaxDatumSize,
+ OrderedDict({SkuName : SkuInfo}),
+ False,
+ None,
+ IsDsc=True)
+
+ if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][TAB_DEFAULT_STORES_DEFAULT] = InitialValue
+ Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][TAB_DEFAULT_STORES_DEFAULT] = (self.MetaFile.File,Dummy4)
+ for pcd in Pcds.values():
+ pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
+ pcd.DatumType = pcdDecObject.DatumType
+ # Only fix the value while no value provided in DSC file.
+ for sku in pcd.SkuInfoList.values():
+ if not sku.DefaultValue:
+ sku.DefaultValue = pcdDecObject.DefaultValue
+ if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
+ SkuInfoObj = list(pcd.SkuInfoList.values())[0]
+ valuefromDec = pcdDecObject.DefaultValue
+ SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', '', '', '', '', SkuInfoObj.VpdOffset, valuefromDec)
+ pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
+ elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
+ del pcd.SkuInfoList[TAB_COMMON]
+ elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
+ del pcd.SkuInfoList[TAB_COMMON]
+
+ #For the same VOID* PCD, if the default value type of one SKU is "Unicode string" while
+ #the other SKUs are "OtherVOID*" (ASCII string or byte array), convert the "Unicode string" to a "byte array".
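+ # e.g. SKU A uses L"abc" (Unicode) while SKU B uses {0x01, 0x02} (byte array);
+ # the Unicode value is converted so both SKUs share the byte-array form (sketch).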
+ for pcd in Pcds.values():
+ PcdValueTypeSet = set()
+ for sku in pcd.SkuInfoList.values():
+ PcdValueTypeSet.add("UnicodeString" if sku.DefaultValue.startswith(('L"',"L'")) else "OtherVOID*")
+ if len(PcdValueTypeSet) > 1:
+ for sku in pcd.SkuInfoList.values():
+ sku.DefaultValue = StringToArray(sku.DefaultValue) if sku.DefaultValue.startswith(('L"',"L'")) else sku.DefaultValue
+
+ list(map(self.FilterSkuSettings, Pcds.values()))
+ return Pcds
+
+ ## Add external modules
+ #
+ # The external modules are mostly those listed in the FDF file, which don't
+ # need to be built.
+ #
+ # @param FilePath The path of module description file
+ #
+ def AddModule(self, FilePath):
+ FilePath = NormPath(FilePath)
+ if FilePath not in self.Modules:
+ Module = ModuleBuildClassObject()
+ Module.MetaFile = FilePath
+ self.Modules.append(Module)
+
+ @property
+ def ToolChainFamily(self):
+ self._ToolChainFamily = TAB_COMPILER_MSFT
+ TargetObj = TargetTxtDict()
+ TargetTxt = TargetObj.Target
+ BuildConfigurationFile = os.path.normpath(os.path.join(GlobalData.gConfDirectory, "target.txt"))
+ if os.path.isfile(BuildConfigurationFile):
+ ToolDefinitionFile = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
+ if ToolDefinitionFile == '':
+ ToolDefinitionFile = "tools_def.txt"
+ ToolDefinitionFile = os.path.normpath(mws.join(self.WorkspaceDir, 'Conf', ToolDefinitionFile))
+ if os.path.isfile(ToolDefinitionFile):
+ ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
+ ToolDefinition = ToolDefObj.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
+ or self._Toolchain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self._Toolchain]:
+ self._ToolChainFamily = TAB_COMPILER_MSFT
+ else:
+ self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self._Toolchain]
+ return self._ToolChainFamily
+
+ ## Add external PCDs
+ #
+ # The external PCDs are mostly those listed in the FDF file to specify address
+ # or offset information.
+ #
+ # @param Name Name of the PCD
+ # @param Guid Token space guid of the PCD
+ # @param Value Value of the PCD
+ #
+ def AddPcd(self, Name, Guid, Value):
+ if (Name, Guid) not in self.Pcds:
+ self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
+ self.Pcds[Name, Guid].DefaultValue = Value
+
+ @property
+ def DecPcds(self):
+ if self._DecPcds is None:
+ FdfInfList = []
+ if GlobalData.gFdfParser:
+ FdfInfList = GlobalData.gFdfParser.Profile.InfList
+ PkgSet = set()
+ for Inf in FdfInfList:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch)
+ if ModuleFile in self._Modules:
+ continue
+ ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
+ PkgSet.update(ModuleData.Packages)
+ if self.Packages:
+ PkgSet.update(self.Packages)
+ self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain, PkgSet)
+ self._GuidDict.update(GlobalData.gPlatformPcds)
+ return self._DecPcds
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/InfBuildData.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/InfBuildData.py
new file mode 100755
index 00000000..8e67e984
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/InfBuildData.py
@@ -0,0 +1,1064 @@
+## @file
+# This file is used to create a database used by build tool
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import absolute_import
+from Common.DataType import *
+from Common.Misc import *
+from Common.caching import cached_property, cached_class_function
+from types import *
+from .MetaFileParser import *
+from collections import OrderedDict
+from Workspace.BuildClassObject import ModuleBuildClassObject, LibraryClassObject, PcdClassObject
+
+## Get Protocol value from given packages
+#
+# @param CName The CName of the Protocol
+# @param PackageList List of packages to look up in
+# @param Inffile The driver file
+#
+# @retval ProtocolValue if the CName is found in any given package
+# @retval None if the CName is not found in any of the given packages
+#
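+# Usage sketch (hypothetical names): _ProtocolValue("gExampleProtocolGuid",
+# Module.Packages, Module.MetaFile.Path) returns the value registered by the
+# first matching package, honoring each package's private-protocol scoping.
+#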
+def _ProtocolValue(CName, PackageList, Inffile = None):
+ for P in PackageList:
+ ProtocolKeys = list(P.Protocols.keys())
+ if Inffile and P._PrivateProtocols:
+ if not Inffile.startswith(P.MetaFile.Dir):
+ ProtocolKeys = [x for x in P.Protocols if x not in P._PrivateProtocols]
+ if CName in ProtocolKeys:
+ return P.Protocols[CName]
+ return None
+
+## Get PPI value from given packages
+#
+# @param CName The CName of the PPI
+# @param PackageList List of packages to look up in
+# @param Inffile The driver file
+#
+# @retval PpiValue if the CName is found in any given package
+# @retval None if the CName is not found in any of the given packages
+#
+def _PpiValue(CName, PackageList, Inffile = None):
+ for P in PackageList:
+ PpiKeys = list(P.Ppis.keys())
+ if Inffile and P._PrivatePpis:
+ if not Inffile.startswith(P.MetaFile.Dir):
+ PpiKeys = [x for x in P.Ppis if x not in P._PrivatePpis]
+ if CName in PpiKeys:
+ return P.Ppis[CName]
+ return None
+
+## Module build information from INF file
+#
+# This class is used to retrieve information stored in the database and convert it
+# into ModuleBuildClassObject form for easier use by AutoGen.
+#
+class InfBuildData(ModuleBuildClassObject):
+ # dict used to convert PCD type in database to string used by build tool
+ _PCD_TYPE_STRING_ = {
+ MODEL_PCD_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE : TAB_PCDS_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG : TAB_PCDS_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC : TAB_PCDS_DYNAMIC,
+ MODEL_PCD_DYNAMIC_DEFAULT : TAB_PCDS_DYNAMIC,
+ MODEL_PCD_DYNAMIC_HII : TAB_PCDS_DYNAMIC_HII,
+ MODEL_PCD_DYNAMIC_VPD : TAB_PCDS_DYNAMIC_VPD,
+ MODEL_PCD_DYNAMIC_EX : TAB_PCDS_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : TAB_PCDS_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC_EX_HII : TAB_PCDS_DYNAMIC_EX_HII,
+ MODEL_PCD_DYNAMIC_EX_VPD : TAB_PCDS_DYNAMIC_EX_VPD,
+ }
+
+ # dict used to convert part of [Defines] to members of InfBuildData directly
+ _PROPERTY_ = {
+ #
+ # Required Fields
+ #
+ TAB_INF_DEFINES_BASE_NAME : "_BaseName",
+ TAB_INF_DEFINES_FILE_GUID : "_Guid",
+ TAB_INF_DEFINES_MODULE_TYPE : "_ModuleType",
+ #
+ # Optional Fields
+ #
+ # TAB_INF_DEFINES_INF_VERSION : "_AutoGenVersion",
+ TAB_INF_DEFINES_COMPONENT_TYPE : "_ComponentType",
+ TAB_INF_DEFINES_MAKEFILE_NAME : "_MakefileName",
+ # TAB_INF_DEFINES_CUSTOM_MAKEFILE : "_CustomMakefile",
+ TAB_INF_DEFINES_DPX_SOURCE :"_DxsFile",
+ TAB_INF_DEFINES_VERSION_NUMBER : "_Version",
+ TAB_INF_DEFINES_VERSION_STRING : "_Version",
+ TAB_INF_DEFINES_VERSION : "_Version",
+ TAB_INF_DEFINES_PCD_IS_DRIVER : "_PcdIsDriver",
+ TAB_INF_DEFINES_SHADOW : "_Shadow"
+ }
+
+ # regular expression for converting XXX_FLAGS in [nmake] section to new type
+ _NMAKE_FLAG_PATTERN_ = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
+ # dict used to convert old tool name used in [nmake] section to new ones
+ _TOOL_CODE_ = {
+ "C" : "CC",
+ BINARY_FILE_TYPE_LIB : "SLINK",
+ "LINK" : "DLINK",
+ }
+
+
+ ## Constructor of InfBuildData
+ #
+ # Initialize object of InfBuildData
+ #
+ # @param FilePath The path of the module description (INF) file
+ # @param RawData The raw data of the INF file
+ # @param BuildDatabase Database used to retrieve module/package information
+ # @param Arch The target architecture
+ # @param Target The build target
+ # @param Toolchain The build toolchain
+ #
+ def __init__(self, FilePath, RawData, BuildDatabase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
+ self.MetaFile = FilePath
+ self._ModuleDir = FilePath.Dir
+ self._RawData = RawData
+ self._Bdb = BuildDatabase
+ self._Arch = Arch
+ self._Target = Target
+ self._Toolchain = Toolchain
+ self._Platform = TAB_COMMON
+ self._TailComments = None
+ self._BaseName = None
+ self._DxsFile = None
+ self._ModuleType = None
+ self._ComponentType = None
+ self._BuildType = None
+ self._Guid = None
+ self._Version = None
+ self._PcdIsDriver = None
+ self._BinaryModule = None
+ self._Shadow = None
+ self._MakefileName = None
+ self._CustomMakefile = None
+ self._Specification = None
+ self._LibraryClass = None
+ self._ModuleEntryPointList = None
+ self._ModuleUnloadImageList = None
+ self._ConstructorList = None
+ self._DestructorList = None
+ self._Defs = OrderedDict()
+ self._ProtocolComments = None
+ self._PpiComments = None
+ self._GuidsUsedByPcd = OrderedDict()
+ self._GuidComments = None
+ self._PcdComments = None
+ self._BuildOptions = None
+ self._DependencyFileList = None
+ self.LibInstances = []
+ self.ReferenceModules = set()
+
+ def SetReferenceModule(self,Module):
+ self.ReferenceModules.add(Module)
+ return self
+
+ ## XXX[key] = value
+ def __setitem__(self, key, value):
+ self.__dict__[self._PROPERTY_[key]] = value
+
+ ## value = XXX[key]
+ def __getitem__(self, key):
+ return self.__dict__[self._PROPERTY_[key]]
+
+ ## "in" test support
+ def __contains__(self, key):
+ return key in self._PROPERTY_
+
+ ## Get current effective macros
+ @cached_property
+ def _Macros(self):
+ RetVal = {}
+ return RetVal
+
+ ## Get architecture
+ @cached_property
+ def Arch(self):
+ return self._Arch
+
+ ## Return the name of platform employing this module
+ @cached_property
+ def Platform(self):
+ return self._Platform
+
+ @cached_property
+ def HeaderComments(self):
+ return [a[0] for a in self._RawData[MODEL_META_DATA_HEADER_COMMENT]]
+
+ @cached_property
+ def TailComments(self):
+ return [a[0] for a in self._RawData[MODEL_META_DATA_TAIL_COMMENT]]
+
+ ## Retrieve all information in [Defines] section
+ #
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
+ #
+ @cached_class_function
+ def _GetHeaderInfo(self):
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
+ for Record in RecordList:
+ Name, Value = Record[1], ReplaceMacro(Record[2], self._Macros, False)
+ # items defined in _PROPERTY_ don't need additional processing
+ if Name in self:
+ self[Name] = Value
+ self._Defs[Name] = Value
+ self._Macros[Name] = Value
+ # some special items in [Defines] section need special treatment
+ elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
+ if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
+ Name = 'UEFI_SPECIFICATION_VERSION'
+ if self._Specification is None:
+ self._Specification = OrderedDict()
+ self._Specification[Name] = GetHexVerValue(Value)
+ if self._Specification[Name] is None:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
+ "'%s' format is not supported for %s" % (Value, Name),
+ File=self.MetaFile, Line=Record[-1])
+ elif Name == 'LIBRARY_CLASS':
+ if self._LibraryClass is None:
+ self._LibraryClass = []
+ ValueList = GetSplitValueList(Value)
+ LibraryClass = ValueList[0]
+ if len(ValueList) > 1:
+ SupModuleList = GetSplitValueList(ValueList[1], ' ')
+ else:
+ SupModuleList = SUP_MODULE_LIST
+ self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList))
+ elif Name == 'ENTRY_POINT':
+ if self._ModuleEntryPointList is None:
+ self._ModuleEntryPointList = []
+ self._ModuleEntryPointList.append(Value)
+ elif Name == 'UNLOAD_IMAGE':
+ if self._ModuleUnloadImageList is None:
+ self._ModuleUnloadImageList = []
+ if not Value:
+ continue
+ self._ModuleUnloadImageList.append(Value)
+ elif Name == 'CONSTRUCTOR':
+ if self._ConstructorList is None:
+ self._ConstructorList = []
+ if not Value:
+ continue
+ self._ConstructorList.append(Value)
+ elif Name == 'DESTRUCTOR':
+ if self._DestructorList is None:
+ self._DestructorList = []
+ if not Value:
+ continue
+ self._DestructorList.append(Value)
+ elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
+ TokenList = GetSplitValueList(Value)
+ if self._CustomMakefile is None:
+ self._CustomMakefile = {}
+ if len(TokenList) < 2:
+ self._CustomMakefile[TAB_COMPILER_MSFT] = TokenList[0]
+ self._CustomMakefile['GCC'] = TokenList[0]
+ else:
+ if TokenList[0] not in [TAB_COMPILER_MSFT, 'GCC']:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
+ "No supported family [%s]" % TokenList[0],
+ File=self.MetaFile, Line=Record[-1])
+ self._CustomMakefile[TokenList[0]] = TokenList[1]
+ else:
+ self._Defs[Name] = Value
+ self._Macros[Name] = Value
+
+ #
+ # Retrieve information in sections specific to Edk.x modules
+ #
+ if not self._ModuleType:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "MODULE_TYPE is not given", File=self.MetaFile)
+ if self._ModuleType not in SUP_MODULE_LIST:
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
+ for Record in RecordList:
+ Name = Record[1]
+ if Name == "MODULE_TYPE":
+ LineNo = Record[6]
+ break
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
+ "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
+ File=self.MetaFile, Line=LineNo)
+ if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
+ if self._ModuleType == SUP_MODULE_SMM_CORE:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)
+ if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x00010032):
+ if self._ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.MetaFile)
+ if self._ModuleType == SUP_MODULE_MM_STANDALONE:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "MM_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.MetaFile)
+ if 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \
+ and 'PCI_CLASS_CODE' in self._Defs and 'PCI_REVISION' in self._Defs:
+ self._BuildType = 'UEFI_OPTIONROM'
+ if 'PCI_COMPRESS' in self._Defs:
+ if self._Defs['PCI_COMPRESS'] not in ('TRUE', 'FALSE'):
+ EdkLogger.error("build", FORMAT_INVALID, "Expected TRUE/FALSE for PCI_COMPRESS: %s" % self.MetaFile)
+
+ elif 'UEFI_HII_RESOURCE_SECTION' in self._Defs \
+ and self._Defs['UEFI_HII_RESOURCE_SECTION'] == 'TRUE':
+ self._BuildType = 'UEFI_HII'
+ else:
+ self._BuildType = self._ModuleType.upper()
+
+ if self._DxsFile:
+ File = PathClass(NormPath(self._DxsFile), self._ModuleDir, Arch=self._Arch)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate(".dxs", CaseSensitive=False)
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
+ File=self.MetaFile, Line=LineNo)
+ if not self._DependencyFileList:
+ self._DependencyFileList = []
+ self._DependencyFileList.append(File)
+
+ ## Retrieve file version
+ @cached_property
+ def AutoGenVersion(self):
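+ # Dotted INF_VERSION values are packed from octal-formatted 4-digit halves,
+ # e.g. "1.10" -> '0001' + '0012' -> 0x00010012 (an illustrative trace of the
+ # conversion below).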
+ RetVal = 0x00010000
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
+ for Record in RecordList:
+ if Record[1] == TAB_INF_DEFINES_INF_VERSION:
+ if '.' in Record[2]:
+ ValueList = Record[2].split('.')
+ Major = '%04o' % int(ValueList[0], 0)
+ Minor = '%04o' % int(ValueList[1], 0)
+ RetVal = int('0x' + Major + Minor, 0)
+ else:
+ RetVal = int(Record[2], 0)
+ break
+ return RetVal
+
+ ## Retrieve BASE_NAME
+ @cached_property
+ def BaseName(self):
+ if self._BaseName is None:
+ self._GetHeaderInfo()
+ if self._BaseName is None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BASE_NAME name", File=self.MetaFile)
+ return self._BaseName
+
+ ## Retrieve DxsFile
+ @cached_property
+ def DxsFile(self):
+ if self._DxsFile is None:
+ self._GetHeaderInfo()
+ if self._DxsFile is None:
+ self._DxsFile = ''
+ return self._DxsFile
+
+ ## Retrieve MODULE_TYPE
+ @cached_property
+ def ModuleType(self):
+ if self._ModuleType is None:
+ self._GetHeaderInfo()
+ if self._ModuleType is None:
+ self._ModuleType = SUP_MODULE_BASE
+ if self._ModuleType not in SUP_MODULE_LIST:
+ self._ModuleType = SUP_MODULE_USER_DEFINED
+ return self._ModuleType
+
+ ## Retrieve COMPONENT_TYPE
+ @cached_property
+ def ComponentType(self):
+ if self._ComponentType is None:
+ self._GetHeaderInfo()
+ if self._ComponentType is None:
+ self._ComponentType = SUP_MODULE_USER_DEFINED
+ return self._ComponentType
+
+ ## Retrieve "BUILD_TYPE"
+ @cached_property
+ def BuildType(self):
+ if self._BuildType is None:
+ self._GetHeaderInfo()
+ if not self._BuildType:
+ self._BuildType = SUP_MODULE_BASE
+ return self._BuildType
+
+ ## Retrieve file guid
+ @cached_property
+ def Guid(self):
+ if self._Guid is None:
+ self._GetHeaderInfo()
+ if self._Guid is None:
+ self._Guid = '00000000-0000-0000-0000-000000000000'
+ return self._Guid
+
+ ## Retrieve module version
+ @cached_property
+ def Version(self):
+ if self._Version is None:
+ self._GetHeaderInfo()
+ if self._Version is None:
+ self._Version = '0.0'
+ return self._Version
+
+ ## Retrieve PCD_IS_DRIVER
+ @cached_property
+ def PcdIsDriver(self):
+ if self._PcdIsDriver is None:
+ self._GetHeaderInfo()
+ if self._PcdIsDriver is None:
+ self._PcdIsDriver = ''
+ return self._PcdIsDriver
+
+ ## Retrieve SHADOW
+ @cached_property
+ def Shadow(self):
+ if self._Shadow is None:
+ self._GetHeaderInfo()
+ if self._Shadow and self._Shadow.upper() == 'TRUE':
+ self._Shadow = True
+ else:
+ self._Shadow = False
+ return self._Shadow
+
+ ## Retrieve CUSTOM_MAKEFILE
+ @cached_property
+ def CustomMakefile(self):
+ if self._CustomMakefile is None:
+ self._GetHeaderInfo()
+ if self._CustomMakefile is None:
+ self._CustomMakefile = {}
+ return self._CustomMakefile
+
+ ## Retrieve EFI_SPECIFICATION_VERSION
+ @cached_property
+ def Specification(self):
+ if self._Specification is None:
+ self._GetHeaderInfo()
+ if self._Specification is None:
+ self._Specification = {}
+ return self._Specification
+
+ ## Retrieve LIBRARY_CLASS
+ @cached_property
+ def LibraryClass(self):
+ if self._LibraryClass is None:
+ self._GetHeaderInfo()
+ if self._LibraryClass is None:
+ self._LibraryClass = []
+ return self._LibraryClass
+
+ ## Retrieve ENTRY_POINT
+ @cached_property
+ def ModuleEntryPointList(self):
+ if self._ModuleEntryPointList is None:
+ self._GetHeaderInfo()
+ if self._ModuleEntryPointList is None:
+ self._ModuleEntryPointList = []
+ return self._ModuleEntryPointList
+
+ ## Retrieve UNLOAD_IMAGE
+ @cached_property
+ def ModuleUnloadImageList(self):
+ if self._ModuleUnloadImageList is None:
+ self._GetHeaderInfo()
+ if self._ModuleUnloadImageList is None:
+ self._ModuleUnloadImageList = []
+ return self._ModuleUnloadImageList
+
+ ## Retrieve CONSTRUCTOR
+ @cached_property
+ def ConstructorList(self):
+ if self._ConstructorList is None:
+ self._GetHeaderInfo()
+ if self._ConstructorList is None:
+ self._ConstructorList = []
+ return self._ConstructorList
+
+ ## Retrieve DESTRUCTOR
+ @cached_property
+ def DestructorList(self):
+ if self._DestructorList is None:
+ self._GetHeaderInfo()
+ if self._DestructorList is None:
+ self._DestructorList = []
+ return self._DestructorList
+
+ ## Retrieve defines other than the above ones
+ @cached_property
+ def Defines(self):
+ self._GetHeaderInfo()
+ return self._Defs
+
+ ## Retrieve binary files
+ @cached_class_function
+ def _GetBinaries(self):
+ RetVal = []
+ RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform]
+ Macros = self._Macros
+ Macros['PROCESSOR'] = self._Arch
+ for Record in RecordList:
+ FileType = Record[0]
+ LineNo = Record[-1]
+ Target = TAB_COMMON
+ FeatureFlag = []
+ if Record[2]:
+ TokenList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
+ if TokenList:
+ Target = TokenList[0]
+ if len(TokenList) > 1:
+ FeatureFlag = Record[1:]
+
+ File = PathClass(NormPath(Record[1], Macros), self._ModuleDir, '', FileType, True, self._Arch, '', Target)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ RetVal.append(File)
+ return RetVal
+
+ ## Retrieve binary files with error check.
+ @cached_property
+ def Binaries(self):
+ RetVal = self._GetBinaries()
+ if GlobalData.gIgnoreSource and not RetVal:
+ ErrorInfo = "The INF file does not contain any RetVal to use in creating the image\n"
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, ExtraData=ErrorInfo, File=self.MetaFile)
+
+ return RetVal
+
+ ## Retrieve source files
+ @cached_property
+ def Sources(self):
+ self._GetHeaderInfo()
+ # Ignore all source files in a binary build mode
+ if GlobalData.gIgnoreSource:
+ return []
+
+ RetVal = []
+ RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
+ Macros = self._Macros
+ for Record in RecordList:
+ LineNo = Record[-1]
+ ToolChainFamily = Record[1]
+ TagName = Record[2]
+ ToolCode = Record[3]
+
+ File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, '',
+ '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+
+ RetVal.append(File)
+ # add any previously found dependency files to the source list
+ if self._DependencyFileList:
+ RetVal.extend(self._DependencyFileList)
+ return RetVal
+
+ ## Retrieve library classes employed by this module
+ @cached_property
+ def LibraryClasses(self):
+ RetVal = OrderedDict()
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
+ for Record in RecordList:
+ Lib = Record[0]
+ Instance = Record[1]
+ if Instance:
+ Instance = NormPath(Instance, self._Macros)
+ RetVal[Lib] = Instance
+ else:
+ RetVal[Lib] = None
+ return RetVal
+
+ ## Retrieve library names (for Edk.x style of modules)
+ @cached_property
+ def Libraries(self):
+ RetVal = []
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
+ for Record in RecordList:
+ LibraryName = ReplaceMacro(Record[0], self._Macros, False)
+ # in case of a name with a '.lib' extension, which is unusual in an Edk.x INF
+ LibraryName = os.path.splitext(LibraryName)[0]
+ if LibraryName not in RetVal:
+ RetVal.append(LibraryName)
+ return RetVal
+
+ @cached_property
+ def ProtocolComments(self):
+ self.Protocols    # accessing Protocols populates self._ProtocolComments
+ return self._ProtocolComments
+
+ ## Retrieve protocols consumed/produced by this module
+ @cached_property
+ def Protocols(self):
+ RetVal = OrderedDict()
+ self._ProtocolComments = OrderedDict()
+ RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
+ for Record in RecordList:
+ CName = Record[0]
+ Value = _ProtocolValue(CName, self.Packages, self.MetaFile.Path)
+ if Value is None:
+ PackageList = "\n\t".join(str(P) for P in self.Packages)
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of Protocol [%s] is not found under [Protocols] section in" % CName,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ RetVal[CName] = Value
+ CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
+ self._ProtocolComments[CName] = [a[0] for a in CommentRecords]
+ return RetVal
+
+ @cached_property
+ def PpiComments(self):
+ self.Ppis    # accessing Ppis populates self._PpiComments
+ return self._PpiComments
+
+ ## Retrieve PPIs consumed/produced by this module
+ @cached_property
+ def Ppis(self):
+ RetVal = OrderedDict()
+ self._PpiComments = OrderedDict()
+ RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
+ for Record in RecordList:
+ CName = Record[0]
+ Value = _PpiValue(CName, self.Packages, self.MetaFile.Path)
+ if Value is None:
+ PackageList = "\n\t".join(str(P) for P in self.Packages)
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of PPI [%s] is not found under [Ppis] section in " % CName,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ RetVal[CName] = Value
+ CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
+ self._PpiComments[CName] = [a[0] for a in CommentRecords]
+ return RetVal
+
+ @cached_property
+ def GuidComments(self):
+ self.Guids    # accessing Guids populates self._GuidComments
+ return self._GuidComments
+
+ ## Retrieve GUIDs consumed/produced by this module
+ @cached_property
+ def Guids(self):
+ RetVal = OrderedDict()
+ self._GuidComments = OrderedDict()
+ RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
+ for Record in RecordList:
+ CName = Record[0]
+ Value = GuidValue(CName, self.Packages, self.MetaFile.Path)
+ if Value is None:
+ PackageList = "\n\t".join(str(P) for P in self.Packages)
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of Guid [%s] is not found under [Guids] section in" % CName,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ RetVal[CName] = Value
+ CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
+ self._GuidComments[CName] = [a[0] for a in CommentRecords]
+
+ for Type in [MODEL_PCD_FIXED_AT_BUILD,MODEL_PCD_PATCHABLE_IN_MODULE,MODEL_PCD_FEATURE_FLAG,MODEL_PCD_DYNAMIC,MODEL_PCD_DYNAMIC_EX]:
+ RecordList = self._RawData[Type, self._Arch, self._Platform]
+ for TokenSpaceGuid, _, _, _, _, _, LineNo in RecordList:
+ # get the guid value
+ if TokenSpaceGuid not in RetVal:
+ Value = GuidValue(TokenSpaceGuid, self.Packages, self.MetaFile.Path)
+ if Value is None:
+ PackageList = "\n\t".join(str(P) for P in self.Packages)
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid,
+ ExtraData=PackageList, File=self.MetaFile, Line=LineNo)
+ RetVal[TokenSpaceGuid] = Value
+ self._GuidsUsedByPcd[TokenSpaceGuid] = Value
+ return RetVal
+
+ ## Retrieve include paths necessary for this module (for Edk.x style of modules)
+ @cached_property
+ def Includes(self):
+ RetVal = []
+ Macros = self._Macros
+ Macros['PROCESSOR'] = GlobalData.gEdkGlobal.get('PROCESSOR', self._Arch)
+ RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch, self._Platform]
+ for Record in RecordList:
+ File = NormPath(Record[0], Macros)
+ if File[0] == '.':
+ File = os.path.join(self._ModuleDir, File)
+ else:
+ File = mws.join(GlobalData.gWorkspace, File)
+ File = RealPath(os.path.normpath(File))
+ if File:
+ RetVal.append(File)
+ return RetVal
+
+ ## Retrieve packages this module depends on
+ @cached_property
+ def Packages(self):
+ RetVal = []
+ RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform]
+ Macros = self._Macros
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ # validate the file
+ ErrorCode, ErrorInfo = File.Validate('.dec')
+ if ErrorCode != 0:
+ LineNo = Record[-1]
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ # parse this package now. we need it to get protocol/ppi/guid value
+ RetVal.append(self._Bdb[File, self._Arch, self._Target, self._Toolchain])
+ return RetVal
+
+ ## Retrieve PCD comments
+ @cached_property
+ def PcdComments(self):
+ self.Pcds    # accessing Pcds populates self._PcdComments
+ return self._PcdComments
+
+ ## Retrieve PCDs used in this module
+ @cached_property
+ def Pcds(self):
+ self._PcdComments = OrderedDict()
+ RetVal = OrderedDict()
+ RetVal.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
+ RetVal.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
+ RetVal.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
+ RetVal.update(self._GetPcd(MODEL_PCD_DYNAMIC))
+ RetVal.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
+ return RetVal
+
+ @cached_property
+ def ModulePcdList(self):
+ RetVal = self.Pcds
+ return RetVal
+
+ @cached_property
+ def LibraryPcdList(self):
+ if self.LibraryClass:
+ return []
+ RetVal = {}
+ Pcds = set()
+ for Library in self.LibInstances:
+ PcdsInLibrary = OrderedDict()
+ for Key in Library.Pcds:
+ if Key in self.Pcds or Key in Pcds:
+ continue
+ Pcds.add(Key)
+ PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
+ RetVal[Library] = PcdsInLibrary
+ return RetVal
+
+ @cached_property
+ def PcdsName(self):
+ PcdsName = set()
+ for Type in (MODEL_PCD_FIXED_AT_BUILD,MODEL_PCD_PATCHABLE_IN_MODULE,MODEL_PCD_FEATURE_FLAG,MODEL_PCD_DYNAMIC,MODEL_PCD_DYNAMIC_EX):
+ RecordList = self._RawData[Type, self._Arch, self._Platform]
+ for TokenSpaceGuid, PcdCName, _, _, _, _, _ in RecordList:
+ PcdsName.add((PcdCName, TokenSpaceGuid))
+ return PcdsName
+
+ ## Retrieve build options specific to this module
+ @cached_property
+ def BuildOptions(self):
+ if self._BuildOptions is None:
+ self._BuildOptions = OrderedDict()
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform]
+ for Record in RecordList:
+ ToolChainFamily = Record[0]
+ ToolChain = Record[1]
+ Option = Record[2]
+ if (ToolChainFamily, ToolChain) not in self._BuildOptions or Option.startswith('='):
+ self._BuildOptions[ToolChainFamily, ToolChain] = Option
+ else:
+ # concatenate the option string if they're for the same tool
+ OptionString = self._BuildOptions[ToolChainFamily, ToolChain]
+ self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
+ return self._BuildOptions
+
+ ## Retrieve dependency expression
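+ #
+ # A [Depex] line is a space-separated expression of opcodes (AND, OR, NOT, ...)
+ # and GUID/Protocol/PPI C names, e.g.:
+ #   gEfiVariableArchProtocolGuid AND gEfiVariableWriteArchProtocolGuid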
+ @cached_property
+ def Depex(self):
+ RetVal = tdict(False, 2)
+
+ # If the module has only Binaries and no Sources, then ignore [Depex]
+ if not self.Sources and self.Binaries:
+ return RetVal
+
+ RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
+ # PEIM and DXE drivers must have a valid [Depex] section
+ if len(self.LibraryClass) == 0 and len(RecordList) == 0:
+ if self.ModuleType == SUP_MODULE_DXE_DRIVER or self.ModuleType == SUP_MODULE_PEIM or self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER or \
+ self.ModuleType == SUP_MODULE_DXE_SAL_DRIVER or self.ModuleType == SUP_MODULE_DXE_RUNTIME_DRIVER:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No [Depex] section or no valid expression in [Depex] section for [%s] module" \
+ % self.ModuleType, File=self.MetaFile)
+
+ if len(RecordList) != 0 and (self.ModuleType == SUP_MODULE_USER_DEFINED or self.ModuleType == SUP_MODULE_HOST_APPLICATION):
+ for Record in RecordList:
+ if Record[4] not in [SUP_MODULE_PEIM, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER]:
+ EdkLogger.error('build', FORMAT_INVALID,
+ "'%s' module must specify the type of [Depex] section" % self.ModuleType,
+ File=self.MetaFile)
+
+ TemporaryDictionary = OrderedDict()
+ for Record in RecordList:
+ DepexStr = ReplaceMacro(Record[0], self._Macros, False)
+ Arch = Record[3]
+ ModuleType = Record[4]
+ TokenList = DepexStr.split()
+ if (Arch, ModuleType) not in TemporaryDictionary:
+ TemporaryDictionary[Arch, ModuleType] = []
+ DepexList = TemporaryDictionary[Arch, ModuleType]
+ for Token in TokenList:
+ if Token in DEPEX_SUPPORTED_OPCODE_SET:
+ DepexList.append(Token)
+ elif Token.endswith(".inf"): # module file name
+ ModuleFile = os.path.normpath(Token)
+ Module = self.BuildDatabase[ModuleFile]
+ if Module is None:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "Module is not found in active platform",
+ ExtraData=Token, File=self.MetaFile, Line=Record[-1])
+ DepexList.append(Module.Guid)
+ else:
+ # it uses the fixed PCD format
+ if '.' in Token:
+ if tuple(Token.split('.')[::-1]) not in self.Pcds:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "PCD [{}] used in [Depex] section should be listed in module PCD section".format(Token), File=self.MetaFile, Line=Record[-1])
+ else:
+ if self.Pcds[tuple(Token.split('.')[::-1])].DatumType != TAB_VOID:
+ EdkLogger.error('build', FORMAT_INVALID, "PCD [{}] used in [Depex] section should be VOID* datum type".format(Token), File=self.MetaFile, Line=Record[-1])
+ Value = Token
+ else:
+ # get the GUID value now
+ Value = _ProtocolValue(Token, self.Packages, self.MetaFile.Path)
+ if Value is None:
+ Value = _PpiValue(Token, self.Packages, self.MetaFile.Path)
+ if Value is None:
+ Value = GuidValue(Token, self.Packages, self.MetaFile.Path)
+
+ if Value is None:
+ PackageList = "\n\t".join(str(P) for P in self.Packages)
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of [%s] is not found in" % Token,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ DepexList.append(Value)
+ for Arch, ModuleType in TemporaryDictionary:
+ RetVal[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType]
+ return RetVal
+
+ ## Retrieve dependency expression
+ @cached_property
+ def DepexExpression(self):
+ RetVal = tdict(False, 2)
+ RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
+ TemporaryDictionary = OrderedDict()
+ for Record in RecordList:
+ DepexStr = ReplaceMacro(Record[0], self._Macros, False)
+ Arch = Record[3]
+ ModuleType = Record[4]
+ TokenList = DepexStr.split()
+ if (Arch, ModuleType) not in TemporaryDictionary:
+ TemporaryDictionary[Arch, ModuleType] = ''
+ for Token in TokenList:
+ TemporaryDictionary[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType] + Token.strip() + ' '
+ for Arch, ModuleType in TemporaryDictionary:
+ RetVal[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType]
+ return RetVal
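+
+ ## Locate the first *.dec file in this module's top-level package directory
+ # (returns None implicitly if no package declaration file is found there)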
+ def LocalPkg(self):
+ module_path = self.MetaFile.File
+ subdir = os.path.split(module_path)[0]
+ TopDir = ""
+ while subdir:
+ subdir,TopDir = os.path.split(subdir)
+
+ for file_name in os.listdir(os.path.join(self.MetaFile.Root,TopDir)):
+ if file_name.upper().endswith("DEC"):
+ pkg = os.path.join(TopDir,file_name)
+ return pkg
+ @cached_class_function
+ def GetGuidsUsedByPcd(self):
+ self.Guids    # access Guids so that self._GuidsUsedByPcd gets populated
+ return self._GuidsUsedByPcd
+
+ ## Retrieve PCD for given type
+ def _GetPcd(self, Type):
+ Pcds = OrderedDict()
+ PcdDict = tdict(True, 4)
+ PcdList = []
+ RecordList = self._RawData[Type, self._Arch, self._Platform]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, Platform, Id, LineNo in RecordList:
+ PcdDict[Arch, Platform, PcdCName, TokenSpaceGuid] = (Setting, LineNo)
+ PcdList.append((PcdCName, TokenSpaceGuid))
+ CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Id]
+ Comments = []
+ for CmtRec in CommentRecords:
+ Comments.append(CmtRec[0])
+ self._PcdComments[TokenSpaceGuid, PcdCName] = Comments
+
+ # resolve PCD type, value, datum info, etc. by getting its definition from package
+ _GuidDict = self.Guids.copy()
+ for PcdCName, TokenSpaceGuid in PcdList:
+ PcdRealName = PcdCName
+ Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
+ if Setting is None:
+ continue
+ ValueList = AnalyzePcdData(Setting)
+ DefaultValue = ValueList[0]
+ Pcd = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ '',
+ '',
+ DefaultValue,
+ '',
+ '',
+ {},
+ False,
+ self.Guids[TokenSpaceGuid]
+ )
+ if Type == MODEL_PCD_PATCHABLE_IN_MODULE and ValueList[1]:
+ # Patch PCD: TokenSpace.PcdCName|Value|Offset
+ Pcd.Offset = ValueList[1]
+
+ if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
+ for Package in self.Packages:
+ for key in Package.Pcds:
+ if (Package.Pcds[key].TokenCName, Package.Pcds[key].TokenSpaceGuidCName) == (PcdRealName, TokenSpaceGuid):
+ for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
+ Pcd_Type = item[0].split('_')[-1]
+ if Pcd_Type == Package.Pcds[key].Type:
+ Value = Package.Pcds[key]
+ Value.TokenCName = Package.Pcds[key].TokenCName + '_' + Pcd_Type
+ if len(key) == 2:
+ newkey = (Value.TokenCName, key[1])
+ elif len(key) == 3:
+ newkey = (Value.TokenCName, key[1], key[2])
+ del Package.Pcds[key]
+ Package.Pcds[newkey] = Value
+ break
+ else:
+ pass
+ else:
+ pass
+
+ # get necessary info from package declaring this PCD
+ for Package in self.Packages:
+ #
+ # 'dynamic' in INF means its type is determined by platform;
+ # if platform doesn't give its type, use 'lowest' one in the
+ # following order, if any
+ #
+ # TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_PATCHABLE_IN_MODULE, TAB_PCDS_FEATURE_FLAG, TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_EX
+ #
+ _GuidDict.update(Package.Guids)
+ PcdType = self._PCD_TYPE_STRING_[Type]
+ if Type == MODEL_PCD_DYNAMIC:
+ Pcd.Pending = True
+ for T in PCD_TYPE_LIST:
+ if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
+ for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
+ if str(item[0]).endswith(T) and (item[0], item[1], T) in Package.Pcds:
+ PcdType = T
+ PcdCName = item[0]
+ break
+ else:
+ pass
+ break
+ else:
+ if (PcdRealName, TokenSpaceGuid, T) in Package.Pcds:
+ PcdType = T
+ break
+
+ else:
+ Pcd.Pending = False
+ if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
+ for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
+ Pcd_Type = item[0].split('_')[-1]
+ if Pcd_Type == PcdType:
+ PcdCName = item[0]
+ break
+ else:
+ pass
+ else:
+ pass
+
+ if (PcdCName, TokenSpaceGuid, PcdType) in Package.Pcds:
+ PcdInPackage = Package.Pcds[PcdCName, TokenSpaceGuid, PcdType]
+ Pcd.Type = PcdType
+ Pcd.TokenValue = PcdInPackage.TokenValue
+
+ #
+ # Check whether the token value exist or not.
+ #
+ if Pcd.TokenValue is None or Pcd.TokenValue == "":
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdRealName, str(Package)),
+ File=self.MetaFile, Line=LineNo,
+ ExtraData=None
+ )
+ #
+ # Check hexadecimal token value length and format.
+ #
+ ReIsValidPcdTokenValue = re.compile(r"^0[xX]0*[0-9a-fA-F]{1,8}$", re.DOTALL)
+ if Pcd.TokenValue.startswith("0x") or Pcd.TokenValue.startswith("0X"):
+ if ReIsValidPcdTokenValue.match(Pcd.TokenValue) is None:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
+ File=self.MetaFile, Line=LineNo,
+ ExtraData=None
+ )
+
+ #
+ # Check decimal token value length and format.
+ #
+ else:
+ try:
+ TokenValueInt = int (Pcd.TokenValue, 10)
+ if (TokenValueInt < 0 or TokenValueInt > 4294967295):
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
+ File=self.MetaFile, Line=LineNo,
+ ExtraData=None
+ )
+ except ValueError:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
+ File=self.MetaFile, Line=LineNo,
+ ExtraData=None
+ )
+
+ Pcd.DatumType = PcdInPackage.DatumType
+ Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize
+ Pcd.InfDefaultValue = Pcd.DefaultValue
+ if not Pcd.DefaultValue:
+ Pcd.DefaultValue = PcdInPackage.DefaultValue
+ else:
+ try:
+ Pcd.DefaultValue = ValueExpressionEx(Pcd.DefaultValue, Pcd.DatumType, _GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(TokenSpaceGuid, PcdRealName, Pcd.DefaultValue, Value),
+ File=self.MetaFile, Line=LineNo)
+ break
+ else:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdRealName, self.MetaFile),
+ File=self.MetaFile, Line=LineNo,
+ ExtraData="\t%s" % '\n\t'.join(str(P) for P in self.Packages)
+ )
+ Pcds[PcdCName, TokenSpaceGuid] = Pcd
+
+ return Pcds
+
+ ## check whether current module is binary module
+ @property
+ def IsBinaryModule(self):
+ if (self.Binaries and not self.Sources) or GlobalData.gIgnoreSource:
+ return True
+ return False
+def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
+ for Key in CopyFromDict:
+ CopyToDict[Key].extend(CopyFromDict[Key])
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaDataTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaDataTable.py
new file mode 100755
index 00000000..22f8c7fe
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaDataTable.py
@@ -0,0 +1,306 @@
+## @file
+# This file is used to create/update/query/erase table for files
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+import Common.LongFilePathOs as os
+
+import Common.EdkLogger as EdkLogger
+from CommonDataClass import DataClass
+from CommonDataClass.DataClass import FileClass
+
+## Convert to SQL required string format
+def ConvertToSqlString(StringList):
+ return list(map(lambda s: "'" + s.replace("'", "''") + "'", StringList))
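+# e.g. ConvertToSqlString(["O'Brien", "x"]) -> ["'O''Brien'", "'x'"] (illustrative)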
+
+## Table
+#
+# This class defines a common table
+#
+# @param object: Inherited from object class
+#
+# @param Db: Database object holding the tables
+# @param Name: Name of the table
+# @param IdBase: Base value for record IDs
+# @param Temporary: Whether the table is temporary
+#
+class Table(object):
+ _COLUMN_ = ''
+ _ID_STEP_ = 1
+ _ID_MAX_ = 0x80000000
+ _DUMMY_ = 0
+
+ def __init__(self, Db, Name='', IdBase=0, Temporary=False):
+ self.Db = Db
+ self.Table = Name
+ self.IdBase = int(IdBase)
+ self.ID = int(IdBase)
+ self.Temporary = Temporary
+ self.Contents = []
+
+ def __str__(self):
+ return self.Table
+
+ ## Create table
+ #
+ # Create a table
+ #
+ def Create(self, NewTable=True):
+ self.Db.CreateEmptyTable(self.Table)
+ self.ID = self.GetId()
+
+ ## Insert record
+ #
+ # Insert a record into the table
+ #
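+ # Each call advances self.ID by _ID_STEP_ and wraps back to IdBase + _ID_STEP_
+ # once it reaches IdBase + _ID_MAX_, keeping IDs within this table's range.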
+ def Insert(self, *Args):
+ self.ID = self.ID + self._ID_STEP_
+ if self.ID >= (self.IdBase + self._ID_MAX_):
+ self.ID = self.IdBase + self._ID_STEP_
+ row = [self.ID]
+ row.extend(Args)
+ self.Contents.append(row)
+
+ return self.ID
+
+
+ ## Get count
+ #
+ # Get a count of all records of the table
+ #
+ # @retval Count: Total count of all records
+ #
+ def GetCount(self):
+ tab = self.Db.GetTable(self.Table)
+ return len(tab)
+
+
+ def GetId(self):
+ tab = self.Db.GetTable(self.Table)
+ Id = max([int(item[0]) for item in tab], default=None)
+ if Id is None:
+ Id = self.IdBase
+ return Id
+
+ ## Init the ID of the table
+ #
+ # Init the ID of the table
+ #
+ def InitID(self):
+ self.ID = self.GetId()
+
+ ## Exec
+ #
+ # Exec Sql Command, return result
+ #
+ # @param SqlCommand: The SqlCommand to be executed
+ #
+ # @retval RecordSet: The result after executed
+ #
+ def Exec(self, SqlCommand):
+ EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
+ self.Db.execute(SqlCommand)
+ RecordSet = self.Db.fetchall()
+ return RecordSet
+
+ def SetEndFlag(self):
+ Tab = self.Db.GetTable(self.Table)
+ Tab.append(self._DUMMY_)
+
+
+ def IsIntegral(self):
+ tab = self.Db.GetTable(self.Table)
+ Id = min([int(item[0]) for item in tab])
+ if Id != -1:
+ return False
+ return True
+
+ def GetAll(self):
+ tab = self.Db.GetTable(self.Table)
+ return tab
+
+
+## TableFile
+#
+# This class defines a table used for files
+#
+# @param object: Inherited from object class
+#
+class TableFile(Table):
+ _COLUMN_ = '''
+ ID INTEGER PRIMARY KEY,
+ Name VARCHAR NOT NULL,
+ ExtName VARCHAR,
+ Path VARCHAR,
+ FullPath VARCHAR NOT NULL,
+ Model INTEGER DEFAULT 0,
+ TimeStamp SINGLE NOT NULL,
+ FromItem REAL NOT NULL
+ '''
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor, 'File')
+
+ ## Insert record
+ #
+ # Insert a record into table File
+ #
+ # @param Name: Name of a File
+ # @param ExtName: ExtName of a File
+ # @param Path: Path of a File
+ # @param FullPath: FullPath of a File
+ # @param Model: Model of a File
+ # @param TimeStamp: TimeStamp of a File
+ #
+ def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp, FromItem=0):
+ (Name, ExtName, Path, FullPath) = ConvertToSqlString((Name, ExtName, Path, FullPath))
+ return Table.Insert(
+ self,
+ Name,
+ ExtName,
+ Path,
+ FullPath,
+ Model,
+ TimeStamp,
+ FromItem
+ )
+
+ ## InsertFile
+ #
+ # Insert one file to table
+ #
+ # @param File: FileClass object of the file to insert
+ # @param Model: The model of the file
+ # @param FromItem: Optional ID of the item the file record comes from
+ #
+ # @retval FileID: The ID after record is inserted
+ #
+ def InsertFile(self, File, Model, FromItem=''):
+ if FromItem:
+ return self.Insert(
+ File.Name,
+ File.Ext,
+ File.Dir,
+ File.Path,
+ Model,
+ File.TimeStamp,
+ FromItem
+ )
+ return self.Insert(
+ File.Name,
+ File.Ext,
+ File.Dir,
+ File.Path,
+ Model,
+ File.TimeStamp
+ )
+
+ ## Get type of a given file
+ #
+ # @param FileId ID of a file
+ #
+ # @retval file_type Model value of given file in the table
+ #
+ def GetFileType(self, FileId):
+ QueryScript = "select Model from %s where ID = '%s'" % (self.Table, FileId)
+ RecordList = self.Exec(QueryScript)
+ if len(RecordList) == 0:
+ return None
+ return RecordList[0][0]
+
+ ## Get file timestamp of a given file
+ #
+ # @param FileId ID of file
+ #
+ # @retval timestamp TimeStamp value of given file in the table
+ #
+ def GetFileTimeStamp(self, FileId):
+ QueryScript = "select TimeStamp from %s where ID = '%s'" % (self.Table, FileId)
+ RecordList = self.Exec(QueryScript)
+ if len(RecordList) == 0:
+ return None
+ return RecordList[0][0]
+
+ ## Update the timestamp of a given file
+ #
+ # @param FileId ID of file
+ # @param TimeStamp Time stamp of file
+ #
+ def SetFileTimeStamp(self, FileId, TimeStamp):
+ self.Exec("update %s set TimeStamp=%s where ID='%s'" % (self.Table, TimeStamp, FileId))
+
+ ## Get list of file with given type
+ #
+ # @param FileType Type value of file
+ #
+ # @retval file_list List of files with the given type
+ #
+ def GetFileList(self, FileType):
+ RecordList = self.Exec("select FullPath from %s where Model=%s" % (self.Table, FileType))
+ if len(RecordList) == 0:
+ return []
+ return [R[0] for R in RecordList]
+
+## TableDataModel
+#
+# This class defines a table used for the data model
+#
+# @param object: Inherited from object class
+#
+#
+class TableDataModel(Table):
+ _COLUMN_ = """
+ ID INTEGER PRIMARY KEY,
+ CrossIndex INTEGER NOT NULL,
+ Name VARCHAR NOT NULL,
+ Description VARCHAR
+ """
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor, 'DataModel')
+
+ ## Insert record
+ #
+ # Insert a record into table DataModel
+ #
+ # @param CrossIndex: CrossIndex of a ModelType
+ # @param Name: Name of a ModelType
+ # @param Description: Description of a ModelType
+ #
+ def Insert(self, CrossIndex, Name, Description):
+ (Name, Description) = ConvertToSqlString((Name, Description))
+ return Table.Insert(self, CrossIndex, Name, Description)
+
+ ## Init table
+ #
+ # Create all default records of table DataModel
+ #
+ def InitTable(self):
+ EdkLogger.verbose("\nInitialize table DataModel started ...")
+ Count = self.GetCount()
+ if Count is not None and Count != 0:
+ return
+ for Item in DataClass.MODEL_LIST:
+ CrossIndex = Item[1]
+ Name = Item[0]
+ Description = Item[0]
+ self.Insert(CrossIndex, Name, Description)
+ EdkLogger.verbose("Initialize table DataModel ... DONE!")
+
+ ## Get CrossIndex
+ #
+ # Get a model's cross index from its name
+ #
+ # @param ModelName: Name of the model
+ # @retval CrossIndex: CrossIndex of the model
+ #
+ def GetCrossIndex(self, ModelName):
+ CrossIndex = -1
+ SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
+ self.Db.execute(SqlCommand)
+ for Item in self.Db:
+ CrossIndex = Item[0]
+
+ return CrossIndex
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
new file mode 100755
index 00000000..333d56f3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
@@ -0,0 +1,45 @@
+## @file
+# This file is used to check format of comments
+#
+# Copyright (c) 2012, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from CommonDataClass.DataClass import (
+ MODEL_PCD_PATCHABLE_IN_MODULE,
+ MODEL_PCD_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC,
+ MODEL_EFI_GUID,
+ MODEL_EFI_PPI,
+ MODEL_EFI_PROTOCOL
+)
+from Common.BuildToolError import FORMAT_INVALID
+import Common.EdkLogger as EdkLogger
+
+UsageList = ("PRODUCES", "PRODUCED", "ALWAYS_PRODUCES", "ALWAYS_PRODUCED", "SOMETIMES_PRODUCES",
+ "SOMETIMES_PRODUCED", "CONSUMES", "CONSUMED", "ALWAYS_CONSUMES", "ALWAYS_CONSUMED",
+ "SOMETIMES_CONSUMES", "SOMETIMES_CONSUMED", "SOMETIME_CONSUMES")
+ErrorMsgMap = {
+ MODEL_EFI_GUID : "The usage for this GUID is not listed in this INF: %s[%d]:%s.",
+ MODEL_EFI_PPI : "The usage for this PPI is not listed in this INF: %s[%d]:%s.",
+ MODEL_EFI_PROTOCOL : "The usage for this Protocol is not listed in this INF: %s[%d]:%s.",
+ MODEL_PCD_DYNAMIC : "The usage for this PCD is not listed in this INF: %s[%d]:%s."
+}
+
+def CheckInfComment(SectionType, Comments, InfFile, LineNo, ValueList):
+ if SectionType in [MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_EX, MODEL_PCD_DYNAMIC]:
+ CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0]+'.'+ValueList[1], ErrorMsgMap[MODEL_PCD_DYNAMIC])
+ elif SectionType in [MODEL_EFI_GUID, MODEL_EFI_PPI]:
+ CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
+ elif SectionType == MODEL_EFI_PROTOCOL:
+ CheckUsage(Comments, UsageList + ("TO_START", "BY_START"), InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
+
+def CheckUsage(Comments, Usages, InfFile, LineNo, Value, ErrorMsg):
+ for Comment in Comments:
+ for Word in Comment[0].replace('#', ' ').split():
+ if Word in Usages:
+ return
+ EdkLogger.error(
+ "Parser", FORMAT_INVALID,
+ ErrorMsg % (InfFile, LineNo, Value)
+ )
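+
+# An INF entry whose trailing comment passes CheckUsage looks like this
+# (gExampleGuid is a hypothetical name):
+#   gExampleGuid    ## CONSUMES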
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileParser.py
new file mode 100755
index 00000000..4cc253e1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -0,0 +1,2193 @@
+## @file
+# This file is used to parse meta files
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# (C) Copyright 2015-2018 Hewlett Packard Enterprise Development LP<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import re
+import time
+import copy
+from hashlib import md5
+
+import Common.EdkLogger as EdkLogger
+import Common.GlobalData as GlobalData
+
+from CommonDataClass.DataClass import *
+from Common.DataType import *
+from Common.StringUtils import *
+from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd, AnalyzePcdExpression, ParseFieldValue, StructPattern
+from Common.Expression import *
+from CommonDataClass.Exceptions import *
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from collections import defaultdict
+from .MetaFileTable import MetaFileStorage
+from .MetaFileCommentParser import CheckInfComment
+from Common.DataType import TAB_COMMENT_EDK_START, TAB_COMMENT_EDK_END
+
+## RegEx for finding file versions
+hexVersionPattern = re.compile(r'0[xX][\da-fA-F]{5,8}')
+decVersionPattern = re.compile(r'\d+\.\d+')
+CODEPattern = re.compile(r"{CODE\([a-fA-F0-9Xx\{\},\s]*\)}")
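+# e.g. a single-line value such as "{CODE({0x01, 0x02})}"; multi-line {CODE(...)}
+# values are first re-joined by ProcessMultipleLineCODEValue() below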
+
+## A decorator used to parse macro definition
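+#
+# Intercepts "DEFINE <name> = <value>" statements (and EDK_GLOBAL in DSC files);
+# any other line falls through to the decorated parser method.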
+def ParseMacro(Parser):
+ def MacroParser(self):
+ Match = GlobalData.gMacroDefPattern.match(self._CurrentLine)
+ if not Match:
+ # Not 'DEFINE/EDK_GLOBAL' statement, call decorated method
+ Parser(self)
+ return
+
+ TokenList = GetSplitValueList(self._CurrentLine[Match.end(1):], TAB_EQUAL_SPLIT, 1)
+ # Syntax check
+ if not TokenList[0]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ if len(TokenList) < 2:
+ TokenList.append('')
+
+ Type = Match.group(1)
+ Name, Value = TokenList
+ # Global macros can only be defined via environment variables
+ if Name in GlobalData.gGlobalDefines:
+ EdkLogger.error('Parser', FORMAT_INVALID, "%s can only be defined via environment variable" % Name,
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ # Only upper case letters, digits and '_' are allowed
+ if not GlobalData.gMacroNamePattern.match(Name):
+ EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
+ Value = ReplaceMacro(Value, self._Macros)
+ if Type in self.DataType:
+ self._ItemType = self.DataType[Type]
+ else:
+ self._ItemType = MODEL_META_DATA_DEFINE
+ # DEFINE defined macros
+ if Type == TAB_DSC_DEFINES_DEFINE:
+ #
+ # First determine whether this DEFINE appears inside conditional directive statements.
+ #
+ if isinstance(self, DscParser) and self._InDirective > -1:
+ pass
+ else:
+ if isinstance(self, DecParser):
+ if MODEL_META_DATA_HEADER in self._SectionType:
+ self._FileLocalMacros[Name] = Value
+ else:
+ self._ConstructSectionMacroDict(Name, Value)
+ elif self._SectionType == MODEL_META_DATA_HEADER:
+ self._FileLocalMacros[Name] = Value
+ else:
+ self._ConstructSectionMacroDict(Name, Value)
+
+ # EDK_GLOBAL defined macros
+ elif not isinstance(self, DscParser):
+ EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ elif self._SectionType != MODEL_META_DATA_HEADER:
+ EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used under [Defines] section",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
+ EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defines a macro with the same name but a different value than one defined by 'DEFINE'",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
+ self._ValueList = [Type, Name, Value]
+
+ return MacroParser
+
+## Base class of parser
+#
+# This class is meant to be derived from. The specific parser for each file
+# type must derive from this class and implement its public interfaces.
+#
+# @param FilePath The path of platform description file
+# @param FileType The raw data of DSC file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+# @param Owner Owner ID (for sub-section parsing)
+# @param From ID from which the data comes (for !INCLUDE directive)
+#
+class MetaFileParser(object):
+ # data type (file content) for specific file type
+ DataType = {}
+
+ # Parser objects used to implement singleton
+ MetaFiles = {}
+
+ ## Factory method
+ #
+ # One file, one parser object. This factory method makes sure that there's
+ # only one object constructed for one meta file.
+ #
+ # @param Class class object of real AutoGen class
+ # (InfParser, DecParser or DscParser)
+ # @param FilePath The path of meta file
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+ def __new__(Class, FilePath, *args, **kwargs):
+ if FilePath in Class.MetaFiles:
+ return Class.MetaFiles[FilePath]
+ else:
+ ParserObject = super(MetaFileParser, Class).__new__(Class)
+ Class.MetaFiles[FilePath] = ParserObject
+ return ParserObject
+
+ ## Constructor of MetaFileParser
+ #
+ # Initialize object of MetaFileParser
+ #
+ # @param FilePath The path of platform description file
+ # @param FileType The raw data of DSC file
+ # @param Arch Default Arch value for filtering sections
+ # @param Table Database used to retrieve module/package information
+ # @param Owner Owner ID (for sub-section parsing)
+ # @param From ID from which the data comes (for !INCLUDE directive)
+ #
+ def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1):
+ self._Table = Table
+ self._RawTable = Table
+ self._Arch = Arch
+ self._FileType = FileType
+ self.MetaFile = FilePath
+ self._FileDir = self.MetaFile.Dir
+ self._Defines = {}
+ self._Packages = []
+ self._FileLocalMacros = {}
+ self._SectionsMacroDict = defaultdict(dict)
+
+ # for recursive parsing
+ self._Owner = [Owner]
+ self._From = From
+
+ # parser status for parsing
+ self._ValueList = ['', '', '', '', '']
+ self._Scope = []
+ self._LineIndex = 0
+ self._CurrentLine = ''
+ self._SectionType = MODEL_UNKNOWN
+ self._SectionName = ''
+ self._InSubsection = False
+ self._SubsectionType = MODEL_UNKNOWN
+ self._SubsectionName = ''
+ self._ItemType = MODEL_UNKNOWN
+ self._LastItem = -1
+ self._Enabled = 0
+ self._Finished = False
+ self._PostProcessed = False
+ # Different versions of meta-files are parsed differently.
+ self._Version = 0
+ self._GuidDict = {} # for Parser PCD value {GUID(gTokeSpaceGuidName)}
+
+ self._PcdCodeValue = ""
+ self._PcdDataTypeCODE = False
+ self._CurrentPcdName = ""
+
+ ## Store the parsed data in table
+ def _Store(self, *Args):
+ return self._Table.Insert(*Args)
+
+ ## Virtual method for starting parse
+ def Start(self):
+ raise NotImplementedError
+
+ ## Notify a post-process is needed
+ def DoPostProcess(self):
+ self._PostProcessed = False
+
+ ## Set parsing complete flag in both class and table
+ def _Done(self):
+ self._Finished = True
+ self._Table.SetEndFlag()
+
+ def _PostProcess(self):
+ self._PostProcessed = True
+
+ ## Get the parse complete flag
+ @property
+ def Finished(self):
+ return self._Finished
+
+ ## Set the complete flag
+ @Finished.setter
+ def Finished(self, Value):
+ self._Finished = Value
+
+ ## Remove records that do not match given Filter Arch
+ def _FilterRecordList(self, RecordList, FilterArch):
+ NewRecordList = []
+ for Record in RecordList:
+ Arch = Record[3]
+ if Arch == TAB_ARCH_COMMON or Arch == FilterArch:
+ NewRecordList.append(Record)
+ return NewRecordList
+
+ ## Use [] style to query data in table, just for readability
+ #
+ # DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)]
+ #
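+ # e.g. Parser[MODEL_EFI_SOURCE_FILE, 'IA32', PlatformName]
+ # (PlatformName is illustrative; both scopes may be omitted)
+ #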
+ def __getitem__(self, DataInfo):
+ if not isinstance(DataInfo, tuple):
+ DataInfo = (DataInfo,)
+
+ # Parse the file first, if necessary
+ self.StartParse()
+
+ # No specific ARCH or Platform given, use raw data
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
+ return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch)
+
+ # Do post-process if necessary
+ if not self._PostProcessed:
+ self._PostProcess()
+
+ return self._FilterRecordList(self._Table.Query(*DataInfo), DataInfo[1])
+
+ def StartParse(self):
+ if not self._Finished:
+ if self._RawTable.IsIntegrity():
+ self._Finished = True
+ else:
+ self._Table = self._RawTable
+ self._PostProcessed = False
+ self.Start()
+ ## Data parser for the common format shared by different file types
+ #
+ # The common format in the meta file is:
+ #
+ # xxx1 | xxx2 | xxx3
+ #
+ @ParseMacro
+ def _CommonParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ ## Data parser for formats that contain paths
+ #
+ # Only paths may contain macros, so replace them before use.
+ #
+ @ParseMacro
+ def _PathParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ # Don't do macro replacement for dsc file at this point
+ if not isinstance(self, DscParser):
+ Macros = self._Macros
+ self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
+
+ ## Skip unsupported data
+ def _Skip(self):
+ EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
+ Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ ## Skip unsupported data for UserExtension Section
+ def _SkipUserExtension(self):
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ ## Section header parser
+ #
+ # The section header is always in following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
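+ # e.g. [Sources.IA32] or [LibraryClasses.common.DXE_DRIVER]
+ #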
+ def _SectionHeaderParser(self):
+ self._Scope = []
+ self._SectionName = ''
+ ArchList = set()
+ for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+ if Item == '':
+ continue
+ ItemList = GetSplitValueList(Item, TAB_SPLIT, 3)
+ # different section names must not be mixed in one section header
+ if self._SectionName != '' and self._SectionName != ItemList[0].upper():
+ EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
+ File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ self._SectionName = ItemList[0].upper()
+ if self._SectionName in self.DataType:
+ self._SectionType = self.DataType[self._SectionName]
+ # Check if the section name is valid
+ if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH_SET and len(ItemList) > 3:
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+ elif self._Version >= 0x00010005:
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+ else:
+ self._SectionType = MODEL_UNKNOWN
+
+ # S1 is always Arch
+ if len(ItemList) > 1:
+ S1 = ItemList[1].upper()
+ else:
+ S1 = TAB_ARCH_COMMON
+ S1 = ReplaceMacro(S1, self._Macros)
+ ArchList.add(S1)
+
+ # S2 may be Platform or ModuleType
+ if len(ItemList) > 2:
+ if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD_SET:
+ S2 = ItemList[2]
+ else:
+ S2 = ItemList[2].upper()
+ else:
+ S2 = TAB_COMMON
+ if len(ItemList) > 3:
+ S3 = ItemList[3]
+ else:
+ S3 = TAB_COMMON
+ self._Scope.append([S1, S2, S3])
+
+ # 'COMMON' must not be used with specific ARCHs in the same section
+ if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+ File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ # If the section information is needed later, it should be stored in database
+ self._ValueList[0] = self._SectionName
+
+ ## [packages] section parser
+ @ParseMacro
+ def _PackageParser(self):
+ self._CurrentLine = CleanString(self._CurrentLine)
+ self._Packages.append(self._CurrentLine)
+ self._ValueList[0] = self._CurrentLine
+
+ ## [defines] section parser
+ @ParseMacro
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[1:len(TokenList)] = TokenList
+ if not self._ValueList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ if not self._ValueList[2]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
+ self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
+ Name, Value = self._ValueList[1], self._ValueList[2]
+ MacroUsed = GlobalData.gMacroRefPattern.findall(Value)
+ if len(MacroUsed) != 0:
+ for Macro in MacroUsed:
+ if Macro in GlobalData.gGlobalDefines:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Global macro %s is not permitted." % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ else:
+ EdkLogger.error("Parser", FORMAT_INVALID, "%s not defined" % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ # Sometimes we need to distinguish between EDK and EDK2 modules
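+ # INF_VERSION may be hexadecimal (e.g. "0x00010005") or dotted decimal
+ # (e.g. "1.10", stored as 0x0001000A: major in the high 16 bits, minor low)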
+ if Name == 'INF_VERSION':
+ if hexVersionPattern.match(Value):
+ self._Version = int(Value, 0)
+ elif decVersionPattern.match(Value):
+ ValueList = Value.split('.')
+ Major = int(ValueList[0], 0)
+ Minor = int(ValueList[1], 0)
+ if Major > 0xffff or Minor > 0xffff:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ self._Version = int('0x{0:04x}{1:04x}'.format(Major, Minor), 0)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
+ if isinstance(self, InfParser) and self._Version < 0x00010005:
+ # EDK module allows using defines as macros
+ self._FileLocalMacros[Name] = Value
+ self._Defines[Name] = Value
+
+ ## [BuildOptions] section parser
+ @ParseMacro
+ def _BuildOptionParser(self):
+ self._CurrentLine = CleanString(self._CurrentLine, BuildOption=True)
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+ if len(TokenList2) == 2:
+ self._ValueList[0] = TokenList2[0] # toolchain family
+ self._ValueList[1] = TokenList2[1] # keys
+ else:
+ self._ValueList[1] = TokenList[0]
+ if len(TokenList) == 2 and not isinstance(self, DscParser): # value
+ self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)
+
+ if self._ValueList[1].count('_') != 4:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+ ExtraData=self._CurrentLine,
+ File=self.MetaFile,
+ Line=self._LineIndex + 1
+ )
+ def GetValidExpression(self, TokenSpaceGuid, PcdCName):
+ return self._Table.GetValidExpression(TokenSpaceGuid, PcdCName)
+
+ @property
+ def _Macros(self):
+ Macros = {}
+ Macros.update(self._FileLocalMacros)
+ Macros.update(self._GetApplicableSectionMacro())
+ return Macros
+
+ ## Construct section Macro dict
+ def _ConstructSectionMacroDict(self, Name, Value):
+ ScopeKey = [(Scope[0], Scope[1], Scope[2]) for Scope in self._Scope]
+ ScopeKey = tuple(ScopeKey)
+ #
+ # DecParser's SectionType is a list; it contains more than one item only in a
+ # PCD section. Since macro usage is not allowed in PCD sections, this is safe.
+ #
+ if isinstance(self, DecParser):
+ SectionDictKey = self._SectionType[0], ScopeKey
+ else:
+ SectionDictKey = self._SectionType, ScopeKey
+
+ self._SectionsMacroDict[SectionDictKey][Name] = Value
+
+ ## Get section macros applicable to the current line; they may come from other
+ ## sections that share the same name but have a wider scope
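+ ## Precedence is lowest to highest: (COMMON, COMMON) macros, then
+ ## (arch, COMMON) / (COMMON, scope2) macros, then fully scope-specific ones;
+ ## the later dict updates below override the earlier ones.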
+ def _GetApplicableSectionMacro(self):
+ Macros = {}
+
+ ComComMacroDict = {}
+ ComSpeMacroDict = {}
+ SpeSpeMacroDict = {}
+
+ ActiveSectionType = self._SectionType
+ if isinstance(self, DecParser):
+ ActiveSectionType = self._SectionType[0]
+
+ for (SectionType, Scope) in self._SectionsMacroDict:
+ if SectionType != ActiveSectionType:
+ continue
+
+ for ActiveScope in self._Scope:
+ Scope0, Scope1, Scope2= ActiveScope[0], ActiveScope[1], ActiveScope[2]
+ if(Scope0, Scope1, Scope2) not in Scope:
+ break
+ else:
+ SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
+
+ for ActiveScope in self._Scope:
+ Scope0, Scope1, Scope2 = ActiveScope[0], ActiveScope[1], ActiveScope[2]
+ if(Scope0, Scope1, Scope2) not in Scope and (Scope0, TAB_COMMON, TAB_COMMON) not in Scope and (TAB_COMMON, Scope1, TAB_COMMON) not in Scope:
+ break
+ else:
+ ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
+
+ if (TAB_COMMON, TAB_COMMON, TAB_COMMON) in Scope:
+ ComComMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
+
+ Macros.update(ComComMacroDict)
+ Macros.update(ComSpeMacroDict)
+ Macros.update(SpeSpeMacroDict)
+
+ return Macros
+
+ def ProcessMultipleLineCODEValue(self,Content):
+ CODEBegin = False
+ CODELine = ""
+ continuelinecount = 0
+ newContent = []
+ for Index in range(0, len(Content)):
+ Line = Content[Index]
+ if CODEBegin:
+ CODELine = CODELine + Line
+ continuelinecount +=1
+ if ")}" in Line:
+ newContent.append(CODELine)
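+ # pad with empty lines so subsequent line numbers still match the source file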
+ for _ in range(continuelinecount):
+ newContent.append("")
+ CODEBegin = False
+ CODELine = ""
+ continuelinecount = 0
+ else:
+ if not Line:
+ newContent.append(Line)
+ continue
+ if "{CODE(" not in Line:
+ newContent.append(Line)
+ continue
+ elif CODEPattern.findall(Line):
+ newContent.append(Line)
+ continue
+ else:
+ CODEBegin = True
+ CODELine = Line
+
+ return newContent
+
+ _SectionParser = {}
+
+## INF file parser class
+#
+# @param FilePath The path of platform description file
+# @param FileType The raw data of DSC file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+#
+class InfParser(MetaFileParser):
+ # INF file supported data types (one type per section)
+ DataType = {
+ TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+ TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+ TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+ TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+ TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
+ ## Constructor of InfParser
+ #
+ # Initialize object of InfParser
+ #
+ # @param FilePath The path of module description file
+ # @param FileType The raw data of DSC file
+ # @param Arch Default Arch value for filtering sections
+ # @param Table Database used to retrieve module/package information
+ #
+ def __init__(self, FilePath, FileType, Arch, Table):
+ # prevent re-initialization
+ if hasattr(self, "_Table"):
+ return
+ MetaFileParser.__init__(self, FilePath, FileType, Arch, Table)
+ self.PcdsDict = {}
+
+ ## Parser starter
+ def Start(self):
+ NmakeLine = ''
+ Content = ''
+ try:
+ with open(str(self.MetaFile), 'r') as File:
+ Content = File.readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ # parse the file line by line
+ IsFindBlockComment = False
+ GetHeaderComment = False
+ TailComments = []
+ SectionComments = []
+ Comments = []
+
+ for Index in range(0, len(Content)):
+ # skip empty, commented, block commented lines
+ Line, Comment = CleanString2(Content[Index], AllowCppStyleComment=True)
+ NextLine = ''
+ if Index + 1 < len(Content):
+ NextLine, NextComment = CleanString2(Content[Index + 1])
+ if Line == '':
+ if Comment:
+ Comments.append((Comment, Index + 1))
+ elif GetHeaderComment:
+ SectionComments.extend(Comments)
+ Comments = []
+ continue
+ if Line.find(TAB_COMMENT_EDK_START) > -1:
+ IsFindBlockComment = True
+ continue
+ if Line.find(TAB_COMMENT_EDK_END) > -1:
+ IsFindBlockComment = False
+ continue
+ if IsFindBlockComment:
+ continue
+
+ self._LineIndex = Index
+ self._CurrentLine = Line
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ if not GetHeaderComment:
+ for Cmt, LNo in Comments:
+ self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', TAB_COMMON,
+ TAB_COMMON, self._Owner[-1], LNo, -1, LNo, -1, 0)
+ GetHeaderComment = True
+ else:
+ TailComments.extend(SectionComments + Comments)
+ Comments = []
+ self._SectionHeaderParser()
+ # Check invalid sections
+ if self._Version < 0x00010005:
+ if self._SectionType in [MODEL_META_DATA_BUILD_OPTION,
+ MODEL_EFI_LIBRARY_CLASS,
+ MODEL_META_DATA_PACKAGE,
+ MODEL_PCD_FIXED_AT_BUILD,
+ MODEL_PCD_PATCHABLE_IN_MODULE,
+ MODEL_PCD_FEATURE_FLAG,
+ MODEL_PCD_DYNAMIC_EX,
+ MODEL_PCD_DYNAMIC,
+ MODEL_EFI_GUID,
+ MODEL_EFI_PROTOCOL,
+ MODEL_EFI_PPI,
+ MODEL_META_DATA_USER_EXTENSION]:
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "Section [%s] is not allowed in inf file without version" % (self._SectionName),
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ elif self._SectionType in [MODEL_EFI_INCLUDE,
+ MODEL_EFI_LIBRARY_INSTANCE,
+ MODEL_META_DATA_NMAKE]:
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "Section [%s] is not allowed in inf file with version 0x%08x" % (self._SectionName, self._Version),
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ continue
+ # merge two lines specified by '\' in section NMAKE
+ elif self._SectionType == MODEL_META_DATA_NMAKE:
+ if Line[-1] == '\\':
+ if NextLine == '':
+ self._CurrentLine = NmakeLine + Line[0:-1]
+ NmakeLine = ''
+ else:
+ if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
+ self._CurrentLine = NmakeLine + Line[0:-1]
+ NmakeLine = ''
+ else:
+ NmakeLine = NmakeLine + ' ' + Line[0:-1]
+ continue
+ else:
+ self._CurrentLine = NmakeLine + Line
+ NmakeLine = ''
+
+ # section content
+ self._ValueList = ['', '', '']
+ # parse current line, result will be put in self._ValueList
+ self._SectionParser[self._SectionType](self)
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
+ self._ItemType = -1
+ Comments = []
+ continue
+ if Comment:
+ Comments.append((Comment, Index + 1))
+ if GlobalData.gOptions and GlobalData.gOptions.CheckUsage:
+ CheckInfComment(self._SectionType, Comments, str(self.MetaFile), Index + 1, self._ValueList)
+ #
+ # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ for Arch, Platform, _ in self._Scope:
+ LastItem = self._Store(self._SectionType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ Platform,
+ self._Owner[-1],
+ self._LineIndex + 1,
+ - 1,
+ self._LineIndex + 1,
+ - 1,
+ 0
+ )
+ for Comment, LineNo in Comments:
+ self._Store(MODEL_META_DATA_COMMENT, Comment, '', '', Arch, Platform,
+ LastItem, LineNo, -1, LineNo, -1, 0)
+ Comments = []
+ SectionComments = []
+ TailComments.extend(SectionComments + Comments)
+ if IsFindBlockComment:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
+ File=self.MetaFile)
+
+ # If there are tail comments in INF file, save to database whatever the comments are
+ for Comment in TailComments:
+ self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', TAB_COMMON,
+ TAB_COMMON, self._Owner[-1], -1, -1, -1, -1, 0)
+ self._Done()
+
+ ## Data parser for formats that contain paths
+ #
+ # Only paths may contain macros, so replace them before use.
+ #
+ def _IncludeParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ Macros = self._Macros
+ if Macros:
+ for Index in range(0, len(self._ValueList)):
+ Value = self._ValueList[Index]
+ if not Value:
+ continue
+ self._ValueList[Index] = ReplaceMacro(Value, Macros)
+
+ ## Parse [Sources] section
+ #
+ # Only paths may contain macros, so replace them before use.
+ #
+ @ParseMacro
+ def _SourceFileParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ Macros = self._Macros
+ # For ACPI tables, remove a trailing macro assignment such as ' TABLE_NAME=Sata1'
+ if 'COMPONENT_TYPE' in Macros:
+ if self._Defines['COMPONENT_TYPE'].upper() == 'ACPITABLE':
+ self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
+ if self._Defines['BASE_NAME'] == 'Microcode':
+ pass
+ self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
+
+ ## Parse [Binaries] section
+ #
+ # Only paths may contain macros, so replace them before use.
+ #
+ @ParseMacro
+ def _BinaryFileParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if not TokenList[0]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if not TokenList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
+
+ ## [nmake] section parser (Edk.x style only)
+ def _NmakeParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ # remove macros
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
+ # remove self-reference in macro setting
+ #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
+
+ ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
+ @ParseMacro
+ def _PcdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(ValueList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[0:1] = ValueList
+ if len(TokenList) > 1:
+ self._ValueList[2] = TokenList[1]
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ # if the value is 'True'/'true'/'TRUE' or 'False'/'false'/'FALSE', replace it with integer 1 or 0.
+ if self._ValueList[2] != '':
+ InfPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+ if InfPcdValueList[0] in ['True', 'true', 'TRUE']:
+ self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '1', 1)
+ elif InfPcdValueList[0] in ['False', 'false', 'FALSE']:
+ self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '0', 1)
+ elif isinstance(InfPcdValueList[0], str) and InfPcdValueList[0].find('$(') >= 0:
+ Value = ReplaceExprMacro(InfPcdValueList[0],self._Macros)
+ if Value != '0':
+ self._ValueList[2] = Value
+ if (self._ValueList[0], self._ValueList[1]) not in self.PcdsDict:
+ self.PcdsDict[self._ValueList[0], self._ValueList[1]] = self._SectionType
+ elif self.PcdsDict[self._ValueList[0], self._ValueList[1]] != self._SectionType:
+ EdkLogger.error('Parser', FORMAT_INVALID, "A PCD must not be listed in sections of different PCD types.",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ ## [depex] section parser
+ @ParseMacro
+ def _DepexParser(self):
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ _SectionParser = {
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
+ MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
+ MODEL_EFI_INCLUDE : _IncludeParser, # for Edk.x modules
+ MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for Edk.x modules
+ MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
+ MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser,
+ MODEL_META_DATA_NMAKE : _NmakeParser, # for Edk.x modules
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX : _PcdParser,
+ MODEL_PCD_DYNAMIC : _PcdParser,
+ MODEL_EFI_SOURCE_FILE : _SourceFileParser,
+ MODEL_EFI_GUID : MetaFileParser._CommonParser,
+ MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser,
+ MODEL_EFI_PPI : MetaFileParser._CommonParser,
+ MODEL_EFI_DEPEX : _DepexParser,
+ MODEL_EFI_BINARY_FILE : _BinaryFileParser,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
+ }
+
+## DSC file parser class
+#
+# @param FilePath The path of platform description file
+# @param FileType The raw data of DSC file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+# @param Owner Owner ID (for sub-section parsing)
+# @param From ID from which the data comes (for !INCLUDE directive)
+#
+class DscParser(MetaFileParser):
+ # DSC file supported data types (one type per section)
+ DataType = {
+ TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
+ TAB_DEFAULT_STORES.upper() : MODEL_EFI_DEFAULT_STORES,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
+ TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
+ TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
+ TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
+ TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
+ TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
+ TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+ TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+ TAB_DSC_DEFINES_EDKGLOBAL : MODEL_META_DATA_GLOBAL_DEFINE,
+ TAB_INCLUDE.upper() : MODEL_META_DATA_INCLUDE,
+ TAB_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ TAB_IF_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ TAB_IF_N_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
+ TAB_ELSE_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
+ TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
+ TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION,
+ TAB_ERROR.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR,
+ }
+
+ # Valid names in define section
+ DefineKeywords = [
+ "DSC_SPECIFICATION",
+ "PLATFORM_NAME",
+ "PLATFORM_GUID",
+ "PLATFORM_VERSION",
+ "SKUID_IDENTIFIER",
+ "PCD_INFO_GENERATION",
+ "PCD_VAR_CHECK_GENERATION",
+ "SUPPORTED_ARCHITECTURES",
+ "BUILD_TARGETS",
+ "OUTPUT_DIRECTORY",
+ "FLASH_DEFINITION",
+ "BUILD_NUMBER",
+ "RFC_LANGUAGES",
+ "ISO_LANGUAGES",
+ "TIME_STAMP_FILE",
+ "VPD_TOOL_GUID",
+ "FIX_LOAD_TOP_MEMORY_ADDRESS",
+ "PREBUILD",
+ "POSTBUILD"
+ ]
+
+ SubSectionDefineKeywords = [
+ "FILE_GUID"
+ ]
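+
+ # For illustration, a minimal (hypothetical) [Defines] section validated by
+ # these keyword lists might look like:
+ #     [Defines]
+ #       PLATFORM_NAME           = ExamplePkg
+ #       PLATFORM_GUID           = 11111111-2222-3333-4444-555555555555
+ #       PLATFORM_VERSION        = 0.1
+ #       SUPPORTED_ARCHITECTURES = IA32|X64
+ #       BUILD_TARGETS           = DEBUG|RELEASE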
+
+ SymbolPattern = ValueExpression.SymbolPattern
+
+ IncludedFiles = set()
+
+ ## Constructor of DscParser
+ #
+ # Initialize object of DscParser
+ #
+ # @param FilePath The path of platform description file
+ # @param FileType The raw data of DSC file
+ # @param Arch Default Arch value for filtering sections
+ # @param Table Database used to retrieve module/package information
+ # @param Owner Owner ID (for sub-section parsing)
+ # @param From ID from which the data comes (for !INCLUDE directive)
+ #
+ def __init__(self, FilePath, FileType, Arch, Table, Owner=-1, From=-1):
+ # prevent re-initialization
+ if hasattr(self, "_Table") and self._Table is Table:
+ return
+ MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, Owner, From)
+ self._Version = 0x00010005 # Only EDK2 dsc file is supported
+ # to store conditional directive evaluation result
+ self._DirectiveStack = []
+ self._DirectiveEvalStack = []
+ self._Enabled = 1
+
+ #
+ # Track whether the current line is inside a conditional directive block (nesting depth, -1 when outside)
+ #
+ self._InDirective = -1
+
+ # Final valid replaceable symbols
+ self._Symbols = {}
+ #
+ # Map the ID between the original table and new table to track
+ # the owner item
+ #
+ self._IdMapping = {-1:-1}
+
+ self._Content = None
+
+ ## Parser starter
+ def Start(self):
+ Content = ''
+ try:
+ with open(str(self.MetaFile), 'r') as File:
+ Content = File.readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ OwnerId = {}
+
+ Content = self.ProcessMultipleLineCODEValue(Content)
+
+ for Index in range(0, len(Content)):
+ Line = CleanString(Content[Index])
+ # skip empty line
+ if Line == '':
+ continue
+
+ self._CurrentLine = Line
+ self._LineIndex = Index
+ if self._InSubsection and self._Owner[-1] == -1:
+ self._Owner.append(self._LastItem)
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionType = MODEL_META_DATA_SECTION_HEADER
+ # subsection ending
+ elif Line[0] == '}' and self._InSubsection:
+ self._InSubsection = False
+ self._SubsectionType = MODEL_UNKNOWN
+ self._SubsectionName = ''
+ self._Owner[-1] = -1
+ OwnerId.clear()
+ continue
+ # subsection header
+ elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
+ self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER
+ # directive line
+ elif Line[0] == '!':
+ TokenList = GetSplitValueList(Line, ' ', 1)
+ if TokenList[0] == TAB_INCLUDE:
+ for Arch, ModuleType, DefaultStore in self._Scope:
+ if self._SubsectionType != MODEL_UNKNOWN and Arch in OwnerId:
+ self._Owner[-1] = OwnerId[Arch]
+ self._DirectiveParser()
+ else:
+ self._DirectiveParser()
+ continue
+ if Line[0] == TAB_OPTION_START and not self._InSubsection:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1), ExtraData=self.MetaFile)
+
+ if self._InSubsection:
+ SectionType = self._SubsectionType
+ else:
+ SectionType = self._SectionType
+ self._ItemType = SectionType
+
+ self._ValueList = ['', '', '']
+ # "SET pcd = pcd_expression" syntax is not supported in Dsc file.
+ if self._CurrentLine.upper().strip().startswith("SET "):
+ EdkLogger.error('Parser', FORMAT_INVALID, '''"SET pcd = pcd_expression" syntax is not supported in Dsc file''',
+ ExtraData=self._CurrentLine,
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ self._SectionParser[SectionType](self)
+ if self._ValueList is None:
+ continue
+ #
+ # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
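+ # As a sketch (hypothetical entry), a [LibraryClasses.X64] line
+ #     ExampleLib|ExamplePkg/Library/ExampleLib/ExampleLib.inf
+ # would be stored roughly as
+ #     (MODEL_EFI_LIBRARY_CLASS, 'ExampleLib',
+ #      'ExamplePkg/Library/ExampleLib/ExampleLib.inf', '', 'X64', <ModuleType>,
+ #      <DefaultStore>, Owner, From, LineNo, -1, LineNo, -1, Enabled)
+ #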
+ for Arch, ModuleType, DefaultStore in self._Scope:
+ Owner = self._Owner[-1]
+ if self._SubsectionType != MODEL_UNKNOWN and Arch in OwnerId:
+ Owner = OwnerId[Arch]
+ self._LastItem = self._Store(
+ self._ItemType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ DefaultStore,
+ Owner,
+ self._From,
+ self._LineIndex + 1,
+ - 1,
+ self._LineIndex + 1,
+ - 1,
+ self._Enabled
+ )
+ if self._SubsectionType == MODEL_UNKNOWN and self._InSubsection:
+ OwnerId[Arch] = self._LastItem
+
+ if self._DirectiveStack:
+ Type, Line, Text = self._DirectiveStack[-1]
+ EdkLogger.error('Parser', FORMAT_INVALID, "No matching '!endif' found",
+ ExtraData=Text, File=self.MetaFile, Line=Line)
+ self._Done()
+
+ ## <subsection_header> parser
+ def _SubsectionHeaderParser(self):
+ self._SubsectionName = self._CurrentLine[1:-1].upper()
+ if self._SubsectionName in self.DataType:
+ self._SubsectionType = self.DataType[self._SubsectionName]
+ else:
+ self._SubsectionType = MODEL_UNKNOWN
+ EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
+ Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ self._ValueList[0] = self._SubsectionName
+
+ ## Directive statement parser
+ def _DirectiveParser(self):
+ self._ValueList = ['', '', '']
+ TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ # Syntax check
+ DirectiveName = self._ValueList[0].upper()
+ if DirectiveName not in self.DataType:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ if DirectiveName in ['!IF', '!IFDEF', '!IFNDEF']:
+ self._InDirective += 1
+
+ if DirectiveName in ['!ENDIF']:
+ self._InDirective -= 1
+
+ if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
+ EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
+ File=self.MetaFile, Line=self._LineIndex + 1,
+ ExtraData=self._CurrentLine)
+
+ ItemType = self.DataType[DirectiveName]
+ Scope = [[TAB_COMMON, TAB_COMMON, TAB_COMMON]]
+ if ItemType == MODEL_META_DATA_INCLUDE:
+ Scope = self._Scope
+ elif ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR:
+ Scope = self._Scope
+ if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
+ # Remove all directives between !if and !endif, including themselves
+ while self._DirectiveStack:
+ # Remove any !else or !elseif
+ DirectiveInfo = self._DirectiveStack.pop()
+ if DirectiveInfo[0] in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+ break
+ else:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'",
+ File=self.MetaFile, Line=self._LineIndex + 1,
+ ExtraData=self._CurrentLine)
+ elif ItemType not in {MODEL_META_DATA_INCLUDE, MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR}:
+ # Report an error if a '!elseif' follows a '!else'
+ if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \
+ self._DirectiveStack and \
+ self._DirectiveStack[-1][0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
+ EdkLogger.error("Parser", FORMAT_INVALID, "'!elseif' after '!else'",
+ File=self.MetaFile, Line=self._LineIndex + 1,
+ ExtraData=self._CurrentLine)
+ self._DirectiveStack.append((ItemType, self._LineIndex + 1, self._CurrentLine))
+
+ #
+ # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ for Arch, ModuleType, DefaultStore in Scope:
+ self._LastItem = self._Store(
+ ItemType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ DefaultStore,
+ self._Owner[-1],
+ self._From,
+ self._LineIndex + 1,
+ - 1,
+ self._LineIndex + 1,
+ - 1,
+ 0
+ )
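+
+ # A minimal sketch of the directive bookkeeping above (hypothetical input):
+ #     !if $(EXAMPLE_FEATURE) == TRUE   -> push (IF, line, text) on the stack
+ #     !else                            -> push (ELSE, line, text)
+ #     !endif                           -> pop back to the matching !if
+ # so an '!elseif' seen while the stack top is an ELSE is rejected above.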
+
+ ## [defines] section parser
+ @ParseMacro
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[1:len(TokenList)] = TokenList
+
+ # Syntax check
+ if not self._ValueList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ if not self._ValueList[2]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ if (self._ValueList[1] not in self.DefineKeywords and
+ (self._InSubsection and self._ValueList[1] not in self.SubSectionDefineKeywords)):
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "Unknown keyword found: %s. "
+ "If this is a macro you must "
+ "add it as a DEFINE in the DSC" % self._ValueList[1],
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ if not self._InSubsection:
+ self._Defines[self._ValueList[1]] = self._ValueList[2]
+ self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
+
+ @ParseMacro
+ def _SkuIdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ if len(TokenList) not in (2, 3):
+ EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Number>|<UiName>[|<UiName>]'",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ @ParseMacro
+ def _DefaultStoresParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ if len(TokenList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Number>|<UiName>'",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ ## Parse Edk style of library modules
+ @ParseMacro
+ def _LibraryInstanceParser(self):
+ self._ValueList[0] = self._CurrentLine
+
+
+ def _DecodeCODEData(self):
+ pass
+
+ ## PCD sections parser
+ #
+ # [PcdsFixedAtBuild]
+ # [PcdsPatchableInModule]
+ # [PcdsFeatureFlag]
+ # [PcdsDynamicEx
+ # [PcdsDynamicExDefault]
+ # [PcdsDynamicExVpd]
+ # [PcdsDynamicExHii]
+ # [PcdsDynamic]
+ # [PcdsDynamicDefault]
+ # [PcdsDynamicVpd]
+ # [PcdsDynamicHii]
+ #
+ @ParseMacro
+ def _PcdParser(self):
+ if self._PcdDataTypeCODE:
+ self._PcdCodeValue = self._PcdCodeValue + "\n " + self._CurrentLine
+ if self._CurrentLine.endswith(")}"):
+ self._CurrentLine = "|".join((self._CurrentPcdName, self._PcdCodeValue))
+ self._PcdDataTypeCODE = False
+ self._PcdCodeValue = ""
+ else:
+ self._ValueList = None
+ return
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._CurrentPcdName = TokenList[0]
+ if len(TokenList) == 2 and TokenList[1].strip().startswith("{CODE"):
+ self._PcdDataTypeCODE = True
+ self._PcdCodeValue = TokenList[1].strip()
+
+ if self._PcdDataTypeCODE:
+ if self._CurrentLine.endswith(")}"):
+ self._PcdDataTypeCODE = False
+ self._PcdCodeValue = ""
+ else:
+ self._ValueList = None
+ return
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ PcdNameTokens = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(PcdNameTokens) == 2:
+ self._ValueList[0], self._ValueList[1] = PcdNameTokens[0], PcdNameTokens[1]
+ elif len(PcdNameTokens) == 3:
+ self._ValueList[0], self._ValueList[1] = ".".join((PcdNameTokens[0], PcdNameTokens[1])), PcdNameTokens[2]
+ elif len(PcdNameTokens) > 3:
+ self._ValueList[0], self._ValueList[1] = ".".join((PcdNameTokens[0], PcdNameTokens[1])), ".".join(PcdNameTokens[2:])
+ if len(TokenList) == 2:
+ self._ValueList[2] = TokenList[1]
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if self._ValueList[2] == '':
+ #
+ # The PCD values are optional for FIXEDATBUILD, PATCHABLEINMODULE, Dynamic/DynamicEx default
+ #
+ if self._SectionType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT):
+ return
+ EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ # Validate the datum type of Dynamic Default PCD and DynamicEx Default PCD
+ ValueList = GetSplitValueList(self._ValueList[2])
+ if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64] \
+ and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1],
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
+ # Validate that the VariableName of a DynamicHii or DynamicExHii PCD entry is not an empty string
+ if self._ItemType in [MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII]:
+ DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+ if len(DscPcdValueList[0].replace('L', '').replace('"', '').strip()) == 0:
+ EdkLogger.error('Parser', FORMAT_INVALID, "The VariableName field in the HII format PCD entry must not be an empty string",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
+ # If the value is 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace it with integer 1 or 0.
+ DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+ if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
+ self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '1', 1)
+ elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
+ self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1)
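+ # Illustrative example (hypothetical names): a [PcdsFixedAtBuild] line
+ #     gExampleTokenSpaceGuid.PcdExampleFlag|FALSE
+ # leaves _ValueList as ['gExampleTokenSpaceGuid', 'PcdExampleFlag', '0']
+ # after the boolean normalization above.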
+
+
+ ## [components] section parser
+ @ParseMacro
+ def _ComponentParser(self):
+ if self._CurrentLine[-1] == '{':
+ self._ValueList[0] = self._CurrentLine[0:-1].strip()
+ self._InSubsection = True
+ self._SubsectionType = MODEL_UNKNOWN
+ else:
+ self._ValueList[0] = self._CurrentLine
+
+ ## [LibraryClasses] section
+ @ParseMacro
+ def _LibraryClassParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ self._ValueList[0:len(TokenList)] = TokenList
+
+
+ ## [BuildOptions] section parser
+ @ParseMacro
+ def _BuildOptionParser(self):
+ self._CurrentLine = CleanString(self._CurrentLine, BuildOption=True)
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+ if len(TokenList2) == 2:
+ self._ValueList[0] = TokenList2[0] # toolchain family
+ self._ValueList[1] = TokenList2[1] # keys
+ else:
+ self._ValueList[1] = TokenList[0]
+ if len(TokenList) == 2: # value
+ self._ValueList[2] = TokenList[1]
+
+ if self._ValueList[1].count('_') != 4:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+ ExtraData=self._CurrentLine,
+ File=self.MetaFile,
+ Line=self._LineIndex + 1
+ )
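+
+ # Example (hypothetical flag): the line
+ #     GCC:*_*_*_CC_FLAGS = -DEXAMPLE
+ # yields _ValueList = ['GCC', '*_*_*_CC_FLAGS', '-DEXAMPLE']; the key must
+ # contain exactly four '_' separators, as checked above.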
+
+ ## Override parent's method since we'll do all macro replacements in parser
+ @property
+ def _Macros(self):
+ Macros = {}
+ Macros.update(self._FileLocalMacros)
+ Macros.update(self._GetApplicableSectionMacro())
+ Macros.update(GlobalData.gEdkGlobal)
+ Macros.update(GlobalData.gPlatformDefines)
+ Macros.update(GlobalData.gCommandLineDefines)
+ # PCD cannot be referenced in macro definition
+ if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]:
+ Macros.update(self._Symbols)
+ if GlobalData.BuildOptionPcd:
+ for Item in GlobalData.BuildOptionPcd:
+ if isinstance(Item, tuple):
+ continue
+ PcdName, TmpValue = Item.split("=")
+ TmpValue = BuildOptionValue(TmpValue, self._GuidDict)
+ Macros[PcdName.strip()] = TmpValue
+ return Macros
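+
+ # Note: later update() calls win, so the effective precedence (lowest to
+ # highest) is file-local macros, section macros, EDK globals, platform
+ # defines, command-line defines, and finally PCD symbols where permitted.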
+
+ def _PostProcess(self):
+ Processor = {
+ MODEL_META_DATA_SECTION_HEADER : self.__ProcessSectionHeader,
+ MODEL_META_DATA_SUBSECTION_HEADER : self.__ProcessSubsectionHeader,
+ MODEL_META_DATA_HEADER : self.__ProcessDefine,
+ MODEL_META_DATA_DEFINE : self.__ProcessDefine,
+ MODEL_META_DATA_GLOBAL_DEFINE : self.__ProcessDefine,
+ MODEL_META_DATA_INCLUDE : self.__ProcessDirective,
+ MODEL_META_DATA_PACKAGE : self.__ProcessPackages,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF : self.__ProcessDirective,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF : self.__ProcessDirective,
+ MODEL_EFI_SKU_ID : self.__ProcessSkuId,
+ MODEL_EFI_DEFAULT_STORES : self.__ProcessDefaultStores,
+ MODEL_EFI_LIBRARY_INSTANCE : self.__ProcessLibraryInstance,
+ MODEL_EFI_LIBRARY_CLASS : self.__ProcessLibraryClass,
+ MODEL_PCD_FIXED_AT_BUILD : self.__ProcessPcd,
+ MODEL_PCD_PATCHABLE_IN_MODULE : self.__ProcessPcd,
+ MODEL_PCD_FEATURE_FLAG : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_DEFAULT : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_HII : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_VPD : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_EX_HII : self.__ProcessPcd,
+ MODEL_PCD_DYNAMIC_EX_VPD : self.__ProcessPcd,
+ MODEL_META_DATA_COMPONENT : self.__ProcessComponent,
+ MODEL_META_DATA_BUILD_OPTION : self.__ProcessBuildOption,
+ MODEL_UNKNOWN : self._Skip,
+ MODEL_META_DATA_USER_EXTENSION : self._SkipUserExtension,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._ProcessError,
+ }
+
+ self._Table = MetaFileStorage(self._RawTable.DB, self.MetaFile, MODEL_FILE_DSC, True)
+ self._DirectiveStack = []
+ self._DirectiveEvalStack = []
+ self._FileWithError = self.MetaFile
+ self._FileLocalMacros = {}
+ self._SectionsMacroDict.clear()
+ GlobalData.gPlatformDefines = {}
+
+ # Get all macros and PCDs which have straightforward values
+ self.__RetrievePcdValue()
+ self._Content = self._RawTable.GetAll()
+ self._ContentIndex = 0
+ self._InSubsection = False
+ while self._ContentIndex < len(self._Content) :
+ Id, self._ItemType, V1, V2, V3, S1, S2, S3, Owner, self._From, \
+ LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
+
+ if self._From < 0:
+ self._FileWithError = self.MetaFile
+
+ self._ContentIndex += 1
+
+ self._Scope = [[S1, S2, S3]]
+ #
+ # For !include directive, handle it specially,
+ # merge arch and module type in case of duplicate items
+ #
+ while self._ItemType == MODEL_META_DATA_INCLUDE:
+ if self._ContentIndex >= len(self._Content):
+ break
+ Record = self._Content[self._ContentIndex]
+ if LineStart == Record[10] and LineEnd == Record[12]:
+ if [Record[5], Record[6], Record[7]] not in self._Scope:
+ self._Scope.append([Record[5], Record[6], Record[7]])
+ self._ContentIndex += 1
+ else:
+ break
+
+ self._LineIndex = LineStart - 1
+ self._ValueList = [V1, V2, V3]
+
+ if Owner > 0 and Owner in self._IdMapping:
+ self._InSubsection = True
+ else:
+ self._InSubsection = False
+ try:
+ Processor[self._ItemType]()
+ except EvaluationException as Excpt:
+ #
+ # Only catch expression evaluation errors here. We need to report
+ # the precise line number on which the error occurred
+ #
+ if hasattr(Excpt, 'Pcd'):
+ if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
+ Info = GlobalData.gPlatformOtherPcds[Excpt.Pcd]
+ EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
+ " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
+ " of the DSC file, and it is currently defined in this section:"
+ " %s, line #: %d." % (Excpt.Pcd, Info[0], Info[1]),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex + 1)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex + 1)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex + 1)
+ except MacroException as Excpt:
+ EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex + 1)
+
+ if self._ValueList is None:
+ continue
+
+ NewOwner = self._IdMapping.get(Owner, -1)
+ self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
+ self._LastItem = self._Store(
+ self._ItemType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ S1,
+ S2,
+ S3,
+ NewOwner,
+ self._From,
+ self._LineIndex + 1,
+ - 1,
+ self._LineIndex + 1,
+ - 1,
+ self._Enabled
+ )
+ self._IdMapping[Id] = self._LastItem
+
+ GlobalData.gPlatformDefines.update(self._FileLocalMacros)
+ self._PostProcessed = True
+ self._Content = None
+
+ def _ProcessError(self):
+ if not self._Enabled:
+ return
+ EdkLogger.error('Parser', ERROR_STATEMENT, self._ValueList[1], File=self.MetaFile, Line=self._LineIndex + 1)
+
+ def __ProcessSectionHeader(self):
+ self._SectionName = self._ValueList[0]
+ if self._SectionName in self.DataType:
+ self._SectionType = self.DataType[self._SectionName]
+ else:
+ self._SectionType = MODEL_UNKNOWN
+
+ def __ProcessSubsectionHeader(self):
+ self._SubsectionName = self._ValueList[0]
+ if self._SubsectionName in self.DataType:
+ self._SubsectionType = self.DataType[self._SubsectionName]
+ else:
+ self._SubsectionType = MODEL_UNKNOWN
+
+ def __RetrievePcdValue(self):
+ try:
+ with open(str(self.MetaFile), 'r') as File:
+ Content = File.readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ GlobalData.gPlatformOtherPcds['DSCFILE'] = str(self.MetaFile)
+ for PcdType in (MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_HII,
+ MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_DYNAMIC_EX_HII,
+ MODEL_PCD_DYNAMIC_EX_VPD):
+ Records = self._RawTable.Query(PcdType, BelongsToItem= -1.0)
+ for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4, ID, Line in Records:
+ Name = TokenSpaceGuid + '.' + PcdName
+ if Name not in GlobalData.gPlatformOtherPcds:
+ PcdLine = Line
+ while not Content[Line - 1].lstrip().startswith(TAB_SECTION_START):
+ Line -= 1
+ GlobalData.gPlatformOtherPcds[Name] = (CleanString(Content[Line - 1]), PcdLine, PcdType)
+
+ def __ProcessDefine(self):
+ if not self._Enabled:
+ return
+
+ Type, Name, Value = self._ValueList
+ Value = ReplaceMacro(Value, self._Macros, False)
+ #
+ # If it is <Defines>, return
+ #
+ if self._InSubsection:
+ self._ValueList = [Type, Name, Value]
+ return
+
+ if self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._SectionType == MODEL_META_DATA_HEADER:
+ self._FileLocalMacros[Name] = Value
+ else:
+ self._ConstructSectionMacroDict(Name, Value)
+ elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
+ GlobalData.gEdkGlobal[Name] = Value
+
+ #
+ # Keyword in [Defines] section can be used as Macros
+ #
+ if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
+ self._FileLocalMacros[Name] = Value
+
+ self._ValueList = [Type, Name, Value]
+
+ def __ProcessDirective(self):
+ Result = None
+ if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF]:
+ Macros = self._Macros
+ Macros.update(GlobalData.gGlobalDefines)
+ try:
+ Result = ValueExpression(self._ValueList[1], Macros)()
+ except SymbolNotFound as Exc:
+ EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
+ Result = False
+ except WrnExpression as Excpt:
+ #
+ # Catch expression evaluation warnings here. We need to report
+ # the precise line number and return the evaluation result
+ #
+ EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
+ File=self._FileWithError, ExtraData=' '.join(self._ValueList),
+ Line=self._LineIndex + 1)
+ Result = Excpt.result
+
+ if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+ self._DirectiveStack.append(self._ItemType)
+ if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
+ Result = bool(Result)
+ else:
+ Macro = self._ValueList[1]
+ Macro = Macro[2:-1] if (Macro.startswith("$(") and Macro.endswith(")")) else Macro
+ Result = Macro in self._Macros
+ if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF:
+ Result = not Result
+ self._DirectiveEvalStack.append(Result)
+ elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF:
+ self._DirectiveStack.append(self._ItemType)
+ self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+ self._DirectiveEvalStack.append(bool(Result))
+ elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
+ self._DirectiveStack.append(self._ItemType)
+ self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+ self._DirectiveEvalStack.append(True)
+ elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
+ # Back to the nearest !if/!ifdef/!ifndef
+ while self._DirectiveStack:
+ self._DirectiveEvalStack.pop()
+ Directive = self._DirectiveStack.pop()
+ if Directive in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+ break
+ elif self._ItemType == MODEL_META_DATA_INCLUDE:
+ # The included file must be relative to workspace or same directory as DSC file
+ __IncludeMacros = {}
+ #
+ # Allow using system environment variables in path after !include
+ #
+ __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE']
+ #
+ # Allow using MACROs that come from the [Defines] section to keep compatibility.
+ #
+ __IncludeMacros.update(self._Macros)
+
+ IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
+ #
+ # First search the include file under the same directory as DSC file
+ #
+ IncludedFile1 = PathClass(IncludedFile, self.MetaFile.Dir)
+ if self._Enabled:
+ ErrorCode, ErrorInfo1 = IncludedFile1.Validate()
+ if ErrorCode != 0:
+ #
+ # Also search file under the WORKSPACE directory
+ #
+ IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
+ ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
+ Line=self._LineIndex + 1, ExtraData=ErrorInfo1 + "\n" + ErrorInfo2)
+
+ self._FileWithError = IncludedFile1
+
+ FromItem = self._Content[self._ContentIndex - 1][0]
+ if self._InSubsection:
+ Owner = self._Content[self._ContentIndex - 1][8]
+ else:
+ Owner = self._Content[self._ContentIndex - 1][0]
+ IncludedFileTable = MetaFileStorage(self._RawTable.DB, IncludedFile1, MODEL_FILE_DSC, False, FromItem=FromItem)
+ Parser = DscParser(IncludedFile1, self._FileType, self._Arch, IncludedFileTable,
+ Owner=Owner, From=FromItem)
+
+ self.IncludedFiles.add (IncludedFile1)
+
+ # set the parser status with current status
+ Parser._SectionName = self._SectionName
+ Parser._SubsectionType = self._SubsectionType
+ Parser._InSubsection = self._InSubsection
+ Parser._SectionType = self._SectionType
+ Parser._Scope = self._Scope
+ Parser._Enabled = self._Enabled
+ # Parse the included file
+ Parser.StartParse()
+ # Insert all records in the table for the included file into dsc file table
+ Records = IncludedFileTable.GetAll()
+ if Records:
+ self._Content[self._ContentIndex:self._ContentIndex] = Records
+ self._Content.pop(self._ContentIndex - 1)
+ self._ValueList = None
+ self._ContentIndex -= 1
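+
+ # A sketch of the !include search order implemented above (hypothetical
+ # path): for '!include Example/Common.dsc.inc' the parser first tries the
+ # directory of the current DSC file, then falls back to $(WORKSPACE), and
+ # reports both validation errors if neither location has the file.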
+
+ def __ProcessPackages(self):
+ self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
+
+ def __ProcessSkuId(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
+ for Value in self._ValueList]
+ def __ProcessDefaultStores(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
+ for Value in self._ValueList]
+
+ def __ProcessLibraryInstance(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
+
+ def __ProcessLibraryClass(self):
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
+
+ def __ProcessPcd(self):
+ if self._ItemType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
+ self._ValueList[2] = ReplaceMacro(self._ValueList[2], self._Macros, RaiseError=True)
+ return
+
+ ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)
+ if not Valid:
+ if self._ItemType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE):
+ if ValList[1] != TAB_VOID and StructPattern.match(ValList[1]) is None and ValList[2]:
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect. The datum type info should be VOID* or a valid struct name.", File=self._FileWithError,
+ Line=self._LineIndex + 1, ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1,
+ ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
+ PcdValue = ValList[Index]
+ if PcdValue and "." not in self._ValueList[0]:
+ try:
+ ValList[Index] = ValueExpression(PcdValue, self._Macros)(True)
+ except WrnExpression as Value:
+ ValList[Index] = Value.result
+ except:
+ pass
+
+ if ValList[Index] == 'True':
+ ValList[Index] = '1'
+ if ValList[Index] == 'False':
+ ValList[Index] = '0'
+
+ if (not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack):
+ GlobalData.gPlatformPcds[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
+ self._Symbols[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
+ try:
+ self._ValueList[2] = '|'.join(ValList)
+ except Exception:
+ print(ValList)
+
+ def __ProcessComponent(self):
+ self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
+
+ def __ProcessBuildOption(self):
+ self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
+ for Value in self._ValueList]
+
+ def DisableOverrideComponent(self,module_id):
+ for ori_id in self._IdMapping:
+ if self._IdMapping[ori_id] == module_id:
+ self._RawTable.DisableComponent(ori_id)
+
+ _SectionParser = {
+ MODEL_META_DATA_HEADER : _DefineParser,
+ MODEL_EFI_SKU_ID : _SkuIdParser,
+ MODEL_EFI_DEFAULT_STORES : _DefaultStoresParser,
+ MODEL_EFI_LIBRARY_INSTANCE : _LibraryInstanceParser,
+ MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser,
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser,
+ MODEL_PCD_DYNAMIC_HII : _PcdParser,
+ MODEL_PCD_DYNAMIC_VPD : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_HII : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser,
+ MODEL_META_DATA_COMPONENT : _ComponentParser,
+ MODEL_META_DATA_BUILD_OPTION : _BuildOptionParser,
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_PACKAGE : MetaFileParser._PackageParser,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
+ MODEL_META_DATA_SECTION_HEADER : MetaFileParser._SectionHeaderParser,
+ MODEL_META_DATA_SUBSECTION_HEADER : _SubsectionHeaderParser,
+ }
+
+## DEC file parser class
+#
+# @param FilePath The path of platform description file
+# @param FileType The raw data of DSC file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+#
+class DecParser(MetaFileParser):
+ # DEC file supported data types (one type per section)
+ DataType = {
+ TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+ TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION,
+ }
+
+ ## Constructor of DecParser
+ #
+ # Initialize object of DecParser
+ #
+ # @param FilePath The path of platform description file
+ # @param FileType The raw data of DSC file
+ # @param Arch Default Arch value for filtering sections
+ # @param Table Database used to retrieve module/package information
+ #
+ def __init__(self, FilePath, FileType, Arch, Table):
+ # prevent re-initialization
+ if hasattr(self, "_Table"):
+ return
+ MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, -1)
+ self._Comments = []
+ self._Version = 0x00010005 # Only EDK2 dec file is supported
+ self._AllPCDs = [] # Only for check duplicate PCD
+ self._AllPcdDict = {}
+
+ self._CurrentStructurePcdName = ""
+ self._include_flag = False
+ self._package_flag = False
+
+ self._RestofValue = ""
+
+ ## Parser starter
+ def Start(self):
+ Content = ''
+ try:
+ with open(str(self.MetaFile), 'r') as File:
+ Content = File.readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ Content = self.ProcessMultipleLineCODEValue(Content)
+
+ self._DefinesCount = 0
+ for Index in range(0, len(Content)):
+ Line, Comment = CleanString2(Content[Index])
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # save comment for later use
+ if Comment:
+ self._Comments.append((Comment, self._LineIndex + 1))
+ # skip empty line
+ if Line == '':
+ continue
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionHeaderParser()
+ if self._SectionName == TAB_DEC_DEFINES.upper():
+ self._DefinesCount += 1
+ self._Comments = []
+ continue
+ if self._SectionType == MODEL_UNKNOWN:
+ EdkLogger.error("Parser", FORMAT_INVALID,
+ ""
+ "Not able to determine \"%s\" in which section."%self._CurrentLine,
+ self.MetaFile, self._LineIndex + 1)
+ elif len(self._SectionType) == 0:
+ self._Comments = []
+ continue
+
+ # section content
+ self._ValueList = ['', '', '']
+ self._SectionParser[self._SectionType[0]](self)
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
+ self._ItemType = -1
+ self._Comments = []
+ continue
+
+ #
+ # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
+ # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
+ #
+ for Arch, ModuleType, Type in self._Scope:
+ self._LastItem = self._Store(
+ Type,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ self._Owner[-1],
+ self._LineIndex + 1,
+ - 1,
+ self._LineIndex + 1,
+ - 1,
+ 0
+ )
+ for Comment, LineNo in self._Comments:
+ self._Store(
+ MODEL_META_DATA_COMMENT,
+ Comment,
+ self._ValueList[0],
+ self._ValueList[1],
+ Arch,
+ ModuleType,
+ self._LastItem,
+ LineNo,
+ - 1,
+ LineNo,
+ - 1,
+ 0
+ )
+ self._Comments = []
+ if self._DefinesCount > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'Multiple [Defines] sections exist.', self.MetaFile)
+ if self._DefinesCount == 0:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exists.', self.MetaFile)
+ self._Done()
+
+
+ ## Section header parser
+ #
+ # The section header is always in following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
+ def _SectionHeaderParser(self):
+ self._Scope = []
+ self._SectionName = ''
+ self._SectionType = []
+ ArchList = set()
+ PrivateList = set()
+ Line = re.sub(r',[\s]*', TAB_COMMA_SPLIT, self._CurrentLine)
+ for Item in Line[1:-1].split(TAB_COMMA_SPLIT):
+ if Item == '':
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR,
+ "section name can NOT be empty or incorrectly use separator comma",
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+ ItemList = Item.split(TAB_SPLIT)
+
+ # different types of PCD are permissible in one section
+ self._SectionName = ItemList[0].upper()
+ if self._SectionName == TAB_DEC_DEFINES.upper() and (len(ItemList) > 1 or len(Line.split(TAB_COMMA_SPLIT)) > 1):
+ EdkLogger.error("Parser", FORMAT_INVALID, "Defines section format is invalid",
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+ if self._SectionName in self.DataType:
+ if self.DataType[self._SectionName] not in self._SectionType:
+ self._SectionType.append(self.DataType[self._SectionName])
+ else:
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+
+ if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
+ File=self.MetaFile,
+ Line=self._LineIndex + 1,
+ ExtraData=self._CurrentLine
+ )
+ # S1 is always Arch
+ if len(ItemList) > 1:
+ S1 = ItemList[1].upper()
+ else:
+ S1 = TAB_ARCH_COMMON
+ ArchList.add(S1)
+ # S2 may be Platform or ModuleType
+ if len(ItemList) > 2:
+ S2 = ItemList[2].upper()
+ # only the Includes, GUIDs, PPIs, and Protocols sections may have the Private tag
+ if self._SectionName in [TAB_INCLUDES.upper(), TAB_GUIDS.upper(), TAB_PROTOCOLS.upper(), TAB_PPIS.upper()]:
+ if S2 != 'PRIVATE':
+ EdkLogger.error("Parser", FORMAT_INVALID, 'Please use keyword "Private" as section tag modifier.',
+ File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ else:
+ S2 = TAB_COMMON
+ PrivateList.add(S2)
+ if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
+ self._Scope.append([S1, S2, self.DataType[self._SectionName]])
+
+ # 'COMMON' must not be used with specific ARCHs in the same section
+ if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+ File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+
+ # It is not permissible to mix section tags without the Private attribute with section tags with the Private attribute
+ if TAB_COMMON in PrivateList and len(PrivateList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Can't mix section tags without the Private attribute with section tags with the Private attribute",
+ File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
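+
+ # Example (hypothetical section): '[Guids.IA32.Private, Guids.X64.Private]'
+ # produces scopes [['IA32', 'PRIVATE', MODEL_EFI_GUID],
+ # ['X64', 'PRIVATE', MODEL_EFI_GUID]].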
+
+ ## [guids], [ppis] and [protocols] section parser
+ @ParseMacro
+ def _GuidParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
+ ExtraData=self._CurrentLine + \
+ " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[0] = TokenList[0]
+ self._ValueList[1] = TokenList[1]
+ if self._ValueList[0] not in self._GuidDict:
+ self._GuidDict[self._ValueList[0]] = self._ValueList[1]
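+
+ # Example (hypothetical GUID): a line such as
+ #     gExampleProtocolGuid = {0x12345678, 0x1234, 0x5678, {0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8}}
+ # passes the brace and format checks above and is cached in _GuidDict.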
+
+ def ParsePcdName(self,namelist):
+ if "[" in namelist[1]:
+ pcdname = namelist[1][:namelist[1].index("[")]
+ arrayindex = namelist[1][namelist[1].index("["):]
+ namelist[1] = pcdname
+ if len(namelist) == 2:
+ namelist.append(arrayindex)
+ else:
+ namelist[2] = ".".join((arrayindex,namelist[2]))
+ return namelist
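+
+ # Example (hypothetical name): ['gExampleGuid', 'PcdArray[0]', 'Field'] is
+ # rewritten to ['gExampleGuid', 'PcdArray', '[0].Field'], splitting the
+ # array index off the PCD name.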
+
+ ## PCD sections parser
+ #
+ # [PcdsFixedAtBuild]
+ # [PcdsPatchableInModule]
+ # [PcdsFeatureFlag]
+ # [PcdsDynamicEx
+ # [PcdsDynamic]
+ #
+ @ParseMacro
+ def _PcdParser(self):
+
+ if self._CurrentStructurePcdName:
+ self._ValueList[0] = self._CurrentStructurePcdName
+
+ if "|" not in self._CurrentLine:
+ if "<HeaderFiles>" == self._CurrentLine:
+ self._include_flag = True
+ self._package_flag = False
+ self._ValueList = None
+ return
+ if "<Packages>" == self._CurrentLine:
+ self._package_flag = True
+ self._ValueList = None
+ self._include_flag = False
+ return
+
+ if self._include_flag:
+ self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
+ self._ValueList[2] = self._CurrentLine
+ if self._package_flag and "}" != self._CurrentLine:
+ self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
+ self._ValueList[2] = self._CurrentLine
+ if self._CurrentLine == "}":
+ self._package_flag = False
+ self._include_flag = False
+ self._ValueList = None
+ return
+ else:
+ PcdTokens = self._CurrentLine.split(TAB_VALUE_SPLIT)
+ PcdNames = self.ParsePcdName(PcdTokens[0].split(TAB_SPLIT))
+ if len(PcdNames) == 2:
+ if PcdNames[1].strip().endswith("]"):
+ PcdName = PcdNames[1][:PcdNames[1].index('[')]
+ Index = PcdNames[1][PcdNames[1].index('['):]
+ self._ValueList[0] = TAB_SPLIT.join((PcdNames[0],PcdName))
+ self._ValueList[1] = Index
+ self._ValueList[2] = PcdTokens[1]
+ else:
+ self._CurrentStructurePcdName = ""
+ else:
+ if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]):
+ EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (self._CurrentStructurePcdName, TAB_SPLIT.join(PcdNames[:2])),
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:])
+ self._ValueList[2] = PcdTokens[1]
+ if not self._CurrentStructurePcdName:
+ if self._PcdDataTypeCODE:
+ if ")}" in self._CurrentLine:
+ ValuePart,RestofValue = self._CurrentLine.split(")}")
+ self._PcdCodeValue = self._PcdCodeValue + "\n " + ValuePart
+ self._CurrentLine = "|".join((self._CurrentPcdName, self._PcdCodeValue,RestofValue))
+ self._PcdDataTypeCODE = False
+ self._PcdCodeValue = ""
+ else:
+ self._PcdCodeValue = self._PcdCodeValue + "\n " + self._CurrentLine
+ self._ValueList = None
+ return
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._CurrentPcdName = TokenList[0]
+ if len(TokenList) == 2 and TokenList[1].strip().startswith("{CODE"):
+ if ")}" in self._CurrentLine:
+ self._PcdDataTypeCODE = False
+ self._PcdCodeValue = ""
+ else:
+ self._PcdDataTypeCODE = True
+ self._PcdCodeValue = TokenList[1].strip()
+ self._ValueList = None
+ return
+
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ ValueRe = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*')
+ # check PCD information
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ # check format of token space GUID CName
+ if not ValueRe.match(self._ValueList[0]):
+ EdkLogger.error('Parser', FORMAT_INVALID, "The format of the token space GUID CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ # check format of PCD CName
+ if not ValueRe.match(self._ValueList[1]):
+ EdkLogger.error('Parser', FORMAT_INVALID, "The format of the PCD CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ # check PCD datum information
+ if len(TokenList) < 2 or TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+
+ ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
+ PtrValue = ValueRe.findall(TokenList[1])
+
+ # Has VOID* type string, may contain "|" character in the string.
+ if len(PtrValue) != 0:
+ ptrValueList = re.sub(ValueRe, '', TokenList[1])
+ ValueList = AnalyzePcdExpression(ptrValueList)
+ ValueList[0] = PtrValue[0]
+ else:
+ ValueList = AnalyzePcdExpression(TokenList[1])
+
+
+ # check if there's enough datum information given
+ if len(ValueList) != 3:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ # check default value
+ if ValueList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ # check datum type
+ if ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+ # check token of the PCD
+ if ValueList[2] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ PcdValue = ValueList[0]
+ if PcdValue:
+ try:
+ self._GuidDict.update(self._AllPcdDict)
+ ValueList[0] = ValueExpressionEx(ValueList[0], ValueList[1], self._GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, Value, ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ # check format of default value against the datum type
+ IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
+ if not IsValid:
+ EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
+ File=self.MetaFile, Line=self._LineIndex + 1)
+
+ if Cause == "StructurePcd":
+ self._CurrentStructurePcdName = TAB_SPLIT.join(self._ValueList[0:2])
+ self._ValueList[0] = self._CurrentStructurePcdName
+ self._ValueList[1] = ValueList[1].strip()
+
+ if ValueList[0] in ['True', 'true', 'TRUE']:
+ ValueList[0] = '1'
+ elif ValueList[0] in ['False', 'false', 'FALSE']:
+ ValueList[0] = '0'
+
+ # check for duplicate PCD definition
+ if (self._Scope[0], self._ValueList[0], self._ValueList[1]) in self._AllPCDs:
+ EdkLogger.error('Parser', FORMAT_INVALID,
+ "The same PCD name and GUID have been already defined",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+ else:
+ self._AllPCDs.append((self._Scope[0], self._ValueList[0], self._ValueList[1]))
+ self._AllPcdDict[TAB_SPLIT.join(self._ValueList[0:2])] = ValueList[0]
+
+ self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip()
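+
+ # Illustrative example (hypothetical PCD): a DEC declaration such as
+ #     gExampleTokenSpaceGuid.PcdExampleFlag|TRUE|BOOLEAN|0x00000001
+ # ends up with _ValueList[2] holding the normalized
+ # '<DefaultValue>|<DatumType>|<Token>' triple, e.g. '1|BOOLEAN|0x00000001'.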
+
+ _SectionParser = {
+ MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
+ MODEL_EFI_INCLUDE : MetaFileParser._PathParser,
+ MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
+ MODEL_EFI_GUID : _GuidParser,
+ MODEL_EFI_PPI : _GuidParser,
+ MODEL_EFI_PROTOCOL : _GuidParser,
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX : _PcdParser,
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
+ }
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileTable.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileTable.py
new file mode 100755
index 00000000..7635d893
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/MetaFileTable.py
@@ -0,0 +1,430 @@
+## @file
+# This file is used to create/update/query/erase a meta file table
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import uuid
+
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import FORMAT_INVALID
+
+from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \
+ MODEL_FILE_OTHERS
+from Common.DataType import *
+
+class MetaFileTable():
+ # TRICK: use file ID as the part before '.'
+ _ID_STEP_ = 1
+ _ID_MAX_ = 99999999
+
+ ## Constructor
+ def __init__(self, DB, MetaFile, FileType, Temporary, FromItem=None):
+ self.MetaFile = MetaFile
+ self.TableName = ""
+ self.DB = DB
+ self._NumpyTab = None
+
+ self.CurrentContent = []
+ DB.TblFile.append([MetaFile.Name,
+ MetaFile.Ext,
+ MetaFile.Dir,
+ MetaFile.Path,
+ FileType,
+ MetaFile.TimeStamp,
+ FromItem])
+ self.FileId = len(DB.TblFile)
+ self.ID = self.FileId * 10**8
+ if Temporary:
+ self.TableName = "_%s_%s_%s" % (FileType, len(DB.TblFile), uuid.uuid4().hex)
+ else:
+ self.TableName = "_%s_%s" % (FileType, len(DB.TblFile))
+
+ def IsIntegrity(self):
+ Result = False
+ try:
+ TimeStamp = self.MetaFile.TimeStamp
+ if not self.CurrentContent:
+ Result = False
+ else:
+ Result = self.CurrentContent[-1][0] < 0
+ except Exception as Exc:
+ EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc))
+ return False
+ return Result
+
+ def SetEndFlag(self):
+ self.CurrentContent.append(self._DUMMY_)
+
+ def GetAll(self):
+ return [item for item in self.CurrentContent if item[0] >= 0 and item[-1]>=0]
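+
+ # Record IDs encode the owning file: for the (hypothetical) third file
+ # registered in the DB, FileId == 3 and item IDs count upward from 3 * 10**8,
+ # so the file a record belongs to can be recovered from the ID alone.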
+
+## Python class representation of table storing module data
+class ModuleTable(MetaFileTable):
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as the table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1]
+
+ ## Constructor
+ def __init__(self, Db, MetaFile, Temporary):
+ MetaFileTable.__init__(self, Db, MetaFile, MODEL_FILE_INF, Temporary)
+
+ ## Insert a record into table Inf
+ #
+ # @param Model: Model of an Inf item
+ # @param Value1: Value1 of an Inf item
+ # @param Value2: Value2 of an Inf item
+ # @param Value3: Value3 of an Inf item
+ # @param Scope1: Arch of an Inf item
+ # @param Scope2: Platform of an Inf item
+ # @param BelongsToItem: The item this item belongs to
+ # @param StartLine: StartLine of an Inf item
+ # @param StartColumn: StartColumn of an Inf item
+ # @param EndLine: EndLine of an Inf item
+ # @param EndColumn: EndColumn of an Inf item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON,
+ BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
+
+ (Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip())
+ self.ID = self.ID + self._ID_STEP_
+ if self.ID >= (MODEL_FILE_INF + self._ID_MAX_):
+ self.ID = MODEL_FILE_INF + self._ID_STEP_
+
+ row = [ self.ID,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ ]
+ self.CurrentContent.append(row)
+ return self.ID
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ # @param Platform The Platform attribute of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Arch=None, Platform=None, BelongsToItem=None):
+
+ QueryTab = self.CurrentContent
+ result = [item for item in QueryTab if item[1] == Model and item[-1]>=0 ]
+
+ if Arch is not None and Arch != TAB_ARCH_COMMON:
+ ArchList = set(['COMMON'])
+ ArchList.add(Arch)
+ result = [item for item in result if item[5] in ArchList]
+
+ if Platform is not None and Platform != TAB_COMMON:
+ Platformlist = set( ['COMMON','DEFAULT'])
+ Platformlist.add(Platform)
+ result = [item for item in result if item[6] in Platformlist]
+
+ if BelongsToItem is not None:
+ result = [item for item in result if item[7] == BelongsToItem]
+
+ result = [ [r[2],r[3],r[4],r[5],r[6],r[0],r[8]] for r in result ]
+ return result
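+
+ # Usage sketch (hypothetical query): fetching the source files of an INF for
+ # IA32 might look like
+ #     Records = Table.Query(MODEL_EFI_SOURCE_FILE, Arch='IA32')
+ # where each returned row is
+ #     [Value1, Value2, Value3, Scope1, Scope2, ID, StartLine].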
+
+## Python class representation of table storing package data
+class PackageTable(MetaFileTable):
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as the table end flag, in case changes to the database have not been committed to the db file
+ _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1]
+
+ ## Constructor
+ def __init__(self, Cursor, MetaFile, Temporary):
+ MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DEC, Temporary)
+
+ ## Insert table
+ #
+ # Insert a record into table Dec
+ #
+ # @param Model: Model of a Dec item
+ # @param Value1: Value1 of a Dec item
+ # @param Value2: Value2 of a Dec item
+ # @param Value3: Value3 of a Dec item
+ # @param Scope1: Arch of a Dec item
+ # @param Scope2: Module type of a Dec item
+ # @param BelongsToItem: ID of the parent item this record belongs to
+ # @param StartLine: StartLine of a Dec item
+ # @param StartColumn: StartColumn of a Dec item
+ # @param EndLine: EndLine of a Dec item
+ # @param EndColumn: EndColumn of a Dec item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON,
+ BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
+ (Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip())
+ self.ID = self.ID + self._ID_STEP_
+
+ row = [ self.ID,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ ]
+ self.CurrentContent.append(row)
+ return self.ID
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Arch=None):
+
+ QueryTab = self.CurrentContent
+ result = [item for item in QueryTab if item[1] == Model and item[-1]>=0 ]
+
+ if Arch is not None and Arch != TAB_ARCH_COMMON:
+ ArchList = set(['COMMON'])
+ ArchList.add(Arch)
+ result = [item for item in result if item[5] in ArchList]
+
+ return [[r[2], r[3], r[4], r[5], r[6], r[0], r[8]] for r in result]
+
+ def GetValidExpression(self, TokenSpaceGuid, PcdCName):
+
+ QueryTab = self.CurrentContent
+ result = [[item[2], item[8]] for item in QueryTab if item[3] == TokenSpaceGuid and item[4] == PcdCName]
+ validateranges = []
+ validlists = []
+ expressions = []
+ try:
+ for row in result:
+ comment = row[0]
+
+ LineNum = row[1]
+ comment = comment.strip("#")
+ comment = comment.strip()
+ oricomment = comment
+ if comment.startswith("@ValidRange"):
+ comment = comment.replace("@ValidRange", "", 1)
+ validateranges.append(comment.split("|")[1].strip())
+ if comment.startswith("@ValidList"):
+ comment = comment.replace("@ValidList", "", 1)
+ validlists.append(comment.split("|")[1].strip())
+ if comment.startswith("@Expression"):
+ comment = comment.replace("@Expression", "", 1)
+ expressions.append(comment.split("|")[1].strip())
+ except Exception as Exc:
+ ValidType = ""
+ if oricomment.startswith("@ValidRange"):
+ ValidType = "@ValidRange"
+ if oricomment.startswith("@ValidList"):
+ ValidType = "@ValidList"
+ if oricomment.startswith("@Expression"):
+ ValidType = "@Expression"
+ EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType, TokenSpaceGuid, PcdCName),
+ ExtraData=oricomment, File=self.MetaFile, Line=LineNum)
+ return set(), set(), set()
+ return set(validateranges), set(validlists), set(expressions)
+
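+# Parsing sketch for GetValidExpression (illustrative): given a DEC comment
+# row such as "## @ValidRange Error | 0x0 - 0x1F", the leading '#' characters
+# and the tag are stripped and the text after the first '|' is kept, so
+#
+#   Ranges, Lists, Exprs = Tbl.GetValidExpression('gTokenSpaceGuid', 'PcdFoo')
+#
+# would yield ({'0x0 - 0x1F'}, set(), set()) for that PCD (names assumed).
+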
+## Python class representation of table storing platform data
+class PlatformTable(MetaFileTable):
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ Scope3 TEXT,
+ BelongsToItem REAL NOT NULL,
+ FromItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as the table end flag, in case changes to the database have not been committed to the db file
+ _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====','====', -1, -1, -1, -1, -1, -1, -1]
+
+ ## Constructor
+ def __init__(self, Cursor, MetaFile, Temporary, FromItem=0):
+ MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DSC, Temporary, FromItem)
+
+ ## Insert table
+ #
+ # Insert a record into table Dsc
+ #
+ # @param Model: Model of a Dsc item
+ # @param Value1: Value1 of a Dsc item
+ # @param Value2: Value2 of a Dsc item
+ # @param Value3: Value3 of a Dsc item
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: ID of the parent item this record belongs to
+ # @param FromItem: ID of the DSC file item this record comes from
+ # @param StartLine: StartLine of a Dsc item
+ # @param StartColumn: StartColumn of a Dsc item
+ # @param EndLine: EndLine of a Dsc item
+ # @param EndColumn: EndColumn of a Dsc item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, Scope3=TAB_DEFAULT_STORES_DEFAULT, BelongsToItem=-1,
+ FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
+ (Value1, Value2, Value3, Scope1, Scope2, Scope3) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip(), Scope3.strip())
+ self.ID = self.ID + self._ID_STEP_
+
+ row = [ self.ID,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ Scope3,
+ BelongsToItem,
+ FromItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ ]
+ self.CurrentContent.append(row)
+ return self.ID
+
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: ID of the parent item this record belongs to
+ # @param FromItem: ID of the DSC file item this record comes from
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
+
+ QueryTab = self.CurrentContent
+ result = [item for item in QueryTab if item[1] == Model and item[-1]>0 ]
+ if Scope1 is not None and Scope1 != TAB_ARCH_COMMON:
+ Sc1 = set(['COMMON'])
+ Sc1.add(Scope1)
+ result = [item for item in result if item[5] in Sc1]
+ Sc2 = set( ['COMMON','DEFAULT'])
+ if Scope2 and Scope2 != TAB_COMMON:
+ if '.' in Scope2:
+ Index = Scope2.index('.')
+ NewScope = TAB_COMMON + Scope2[Index:]
+ Sc2.add(NewScope)
+ Sc2.add(Scope2)
+ result = [item for item in result if item[6] in Sc2]
+
+ if BelongsToItem is not None:
+ result = [item for item in result if item[8] == BelongsToItem]
+ else:
+ result = [item for item in result if item[8] < 0]
+ if FromItem is not None:
+ result = [item for item in result if item[9] == FromItem]
+
+ result = [ [r[2],r[3],r[4],r[5],r[6],r[7],r[0],r[10]] for r in result ]
+ return result
+
+ def DisableComponent(self,comp_id):
+ for item in self.CurrentContent:
+ if item[0] == comp_id or item[8] == comp_id:
+ item[-1] = -1
+
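+# Sketch of the Enabled-flag convention (illustrative; MODEL_META_DATA_COMPONENT
+# is assumed from CommonDataClass.DataClass): Insert() defaults Enabled to 1,
+# Query() keeps only rows whose last field is > 0, and DisableComponent()
+# flips a component row and its children to -1:
+#
+#   Id = Tbl.Insert(MODEL_META_DATA_COMPONENT, 'Foo/Foo.inf', '', '')
+#   Tbl.DisableComponent(Id)
+#   assert Tbl.Query(MODEL_META_DATA_COMPONENT) == []
+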
+## Factory class to produce different storage for different type of meta-file
+class MetaFileStorage(object):
+ _FILE_TABLE_ = {
+ MODEL_FILE_INF : ModuleTable,
+ MODEL_FILE_DEC : PackageTable,
+ MODEL_FILE_DSC : PlatformTable,
+ MODEL_FILE_OTHERS : MetaFileTable,
+ }
+
+ _FILE_TYPE_ = {
+ ".inf" : MODEL_FILE_INF,
+ ".dec" : MODEL_FILE_DEC,
+ ".dsc" : MODEL_FILE_DSC,
+ }
+ _ObjectCache = {}
+ ## Constructor
+ def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False, FromItem=None):
+ # no type given, try to find one
+ key = (MetaFile.Path, FileType, Temporary, FromItem)
+ if key in Class._ObjectCache:
+ return Class._ObjectCache[key]
+ if not FileType:
+ if MetaFile.Type in Class._FILE_TYPE_:   # Class, not self: there is no 'self' inside __new__
+ FileType = Class._FILE_TYPE_[MetaFile.Type]
+ else:
+ FileType = MODEL_FILE_OTHERS
+
+ # don't pass the type around if it's well known
+ if FileType == MODEL_FILE_OTHERS:
+ Args = (Cursor, MetaFile, FileType, Temporary)
+ else:
+ Args = (Cursor, MetaFile, Temporary)
+ if FromItem:
+ Args = Args + (FromItem,)
+
+ # create the storage object and return it to caller
+ RetVal = Class._FILE_TABLE_[FileType](*Args)
+ if not Temporary:
+ Class._ObjectCache[key] = RetVal
+ return RetVal
+
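+# Factory usage sketch (illustrative; Db is a WorkspaceDatabase and PathClass
+# comes from Common.Misc):
+#
+#   Storage = MetaFileStorage(Db, PathClass('Pkg/Pkg.dec'))
+#   # '.dec' maps to MODEL_FILE_DEC, so Storage is a PackageTable; a second
+#   # call with the same key returns the cached object for non-temporary tables.
+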
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceCommon.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
new file mode 100755
index 00000000..c888e6fc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
@@ -0,0 +1,256 @@
+## @file
+# Common routines used by workspace
+#
+# Copyright (c) 2012 - 2020, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import absolute_import
+from collections import OrderedDict, defaultdict
+from Common.DataType import SUP_MODULE_USER_DEFINED
+from Common.DataType import SUP_MODULE_HOST_APPLICATION
+from .BuildClassObject import LibraryClassObject
+import Common.GlobalData as GlobalData
+from Workspace.BuildClassObject import StructurePcd
+from Common.BuildToolError import RESOURCE_NOT_AVAILABLE
+from Common.BuildToolError import OPTION_MISSING
+from Common.BuildToolError import BUILD_ERROR
+import Common.EdkLogger as EdkLogger
+
+class OrderedListDict(OrderedDict):
+ def __init__(self, *args, **kwargs):
+ super(OrderedListDict, self).__init__(*args, **kwargs)
+ self.default_factory = list
+
+ def __missing__(self, key):
+ self[key] = Value = self.default_factory()
+ return Value
+
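+# OrderedListDict behaves like collections.defaultdict(list) while preserving
+# key insertion order (a minimal sketch):
+#
+#   d = OrderedListDict()
+#   d['LibA'].append('ModuleX')   # missing key is created as an empty list
+#   d['LibB'].append('ModuleY')
+#   assert list(d) == ['LibA', 'LibB']
+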
+## Get all packages from platform for specified arch, target and toolchain
+#
+# @param Platform: DscBuildData instance
+# @param BuildDatabase: The database that stores the data for all meta files
+# @param Arch: Current arch
+# @param Target: Current target
+# @param Toolchain: Current toolchain
+# @retval: List of packages which are DecBuildData instances
+#
+def GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain):
+ PkgSet = set()
+ if Platform.Packages:
+ PkgSet.update(Platform.Packages)
+ for ModuleFile in Platform.Modules:
+ Data = BuildDatabase[ModuleFile, Arch, Target, Toolchain]
+ PkgSet.update(Data.Packages)
+ for Lib in GetLiabraryInstances(Data, Platform, BuildDatabase, Arch, Target, Toolchain):
+ PkgSet.update(Lib.Packages)
+ return list(PkgSet)
+
+## Get all declared PCD from platform for specified arch, target and toolchain
+#
+# @param Platform: DscBuildData instance
+# @param BuildDatabase: The database that stores the data for all meta files
+# @param Arch: Current arch
+# @param Target: Current target
+# @param Toolchain: Current toolchain
+# @retval: A dictionary contains instances of PcdClassObject with key (PcdCName, TokenSpaceGuid)
+# @retval: A dictionary contains real GUIDs of TokenSpaceGuid
+#
+def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain, additionalPkgs):
+ PkgList = GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain)
+ PkgList = set(PkgList)
+ PkgList |= additionalPkgs
+ DecPcds = {}
+ GuidDict = {}
+ for Pkg in PkgList:
+ Guids = Pkg.Guids
+ GuidDict.update(Guids)
+ for Pcd in Pkg.Pcds:
+ PcdCName = Pcd[0]
+ PcdTokenName = Pcd[1]
+ if GlobalData.MixedPcd:
+ for PcdItem in GlobalData.MixedPcd:
+ if (PcdCName, PcdTokenName) in GlobalData.MixedPcd[PcdItem]:
+ PcdCName = PcdItem[0]
+ break
+ if (PcdCName, PcdTokenName) not in DecPcds:
+ DecPcds[PcdCName, PcdTokenName] = Pkg.Pcds[Pcd]
+ return DecPcds, GuidDict
+
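+# Result shape sketch (names are illustrative): DecPcds is keyed by the
+# (TokenCName, TokenSpaceGuidCName) pair and GuidDict maps GUID C names to
+# their values:
+#
+#   DecPcds, GuidDict = GetDeclaredPcd(Platform, Db, 'X64', 'DEBUG', 'GCC5', set())
+#   Pcd = DecPcds[('PcdDebugPropertyMask', 'gEfiMdePkgTokenSpaceGuid')]
+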
+## Get all dependent libraries for a module
+#
+# @param Module: InfBuildData instance
+# @param Platform: DscBuildData instance
+# @param BuildDatabase: The database that stores the data for all meta files
+# @param Arch: Current arch
+# @param Target: Current target
+# @param Toolchain: Current toolchain
+# @retval: List of dependent libraries which are InfBuildData instances
+#
+def GetLiabraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
+ return GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain, Platform.MetaFile, EdkLogger)
+
+def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain, FileName = '', EdkLogger = None):
+ if Module.LibInstances:
+ return Module.LibInstances
+ ModuleType = Module.ModuleType
+
+ # add forced library instances (specified under LibraryClasses sections)
+ #
+ # If a module has a MODULE_TYPE of USER_DEFINED,
+ # do not link in NULL library class instances from the global [LibraryClasses.*] sections.
+ #
+ if Module.ModuleType != SUP_MODULE_USER_DEFINED:
+ for LibraryClass in Platform.LibraryClasses.GetKeys():
+ if LibraryClass.startswith("NULL") and Platform.LibraryClasses[LibraryClass, Module.ModuleType]:
+ Module.LibraryClasses[LibraryClass] = Platform.LibraryClasses[LibraryClass, Module.ModuleType]
+
+ # add forced library instances (specified in module overrides)
+ for LibraryClass in Platform.Modules[str(Module)].LibraryClasses:
+ if LibraryClass.startswith("NULL"):
+ Module.LibraryClasses[LibraryClass] = Platform.Modules[str(Module)].LibraryClasses[LibraryClass]
+
+ # EdkII module
+ LibraryConsumerList = [Module]
+ Constructor = []
+ ConsumedByList = OrderedListDict()
+ LibraryInstance = OrderedDict()
+
+ if not Module.LibraryClass:
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), Arch))
+
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryClassName in M.LibraryClasses:
+ if LibraryClassName not in LibraryInstance:
+ # override library instance for this module
+ LibraryPath = Platform.Modules[str(Module)].LibraryClasses.get(LibraryClassName,Platform.LibraryClasses[LibraryClassName, ModuleType])
+ if LibraryPath is None:
+ LibraryPath = M.LibraryClasses.get(LibraryClassName)
+ if LibraryPath is None:
+ if not Module.LibraryClass:
+ EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
+ "Instance of library class [%s] is not found" % LibraryClassName,
+ File=FileName,
+ ExtraData="in [%s] [%s]\n\tconsumed by module [%s]" % (str(M), Arch, str(Module)))
+ else:
+ return []
+
+ LibraryModule = BuildDatabase[LibraryPath, Arch, Target, Toolchain]
+ # for those forced library instance (NULL library), add a fake library class
+ if LibraryClassName.startswith("NULL"):
+ LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
+ elif LibraryModule.LibraryClass is None \
+ or len(LibraryModule.LibraryClass) == 0 \
+ or (ModuleType != SUP_MODULE_USER_DEFINED and ModuleType != SUP_MODULE_HOST_APPLICATION
+ and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
+ # only USER_DEFINED can link against any library instance regardless of its SupModList
+ if not Module.LibraryClass:
+ EdkLogger.error("build", OPTION_MISSING,
+ "Module type [%s] is not supported by library instance [%s]" \
+ % (ModuleType, LibraryPath), File=FileName,
+ ExtraData="consumed by [%s]" % str(Module))
+ else:
+ return []
+
+ LibraryInstance[LibraryClassName] = LibraryModule
+ LibraryConsumerList.append(LibraryModule)
+ if not Module.LibraryClass:
+ EdkLogger.verbose("\t" + str(LibraryClassName) + " : " + str(LibraryModule))
+ else:
+ LibraryModule = LibraryInstance[LibraryClassName]
+
+ if LibraryModule is None:
+ continue
+
+ if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
+ Constructor.append(LibraryModule)
+
+ # don't add current module itself to consumer list
+ if M != Module:
+ if M in ConsumedByList[LibraryModule]:
+ continue
+ ConsumedByList[LibraryModule].append(M)
+ #
+ # Initialize the sorted output list to the empty set
+ #
+ SortedLibraryList = []
+ #
+ # Q <- Set of all nodes with no incoming edges
+ #
+ LibraryList = [] #LibraryInstance.values()
+ Q = []
+ for LibraryClassName in LibraryInstance:
+ M = LibraryInstance[LibraryClassName]
+ LibraryList.append(M)
+ if not ConsumedByList[M]:
+ Q.append(M)
+
+ #
+ # start the DAG algorithm
+ #
+ while True:
+ EdgeRemoved = True
+ while Q == [] and EdgeRemoved:
+ EdgeRemoved = False
+ # for each node Item with a Constructor
+ for Item in LibraryList:
+ if Item not in Constructor:
+ continue
+ # for each Node without a constructor with an edge e from Item to Node
+ for Node in ConsumedByList[Item]:
+ if Node in Constructor:
+ continue
+ # remove edge e from the graph if Node has no constructor
+ ConsumedByList[Item].remove(Node)
+ EdgeRemoved = True
+ if not ConsumedByList[Item]:
+ # insert Item into Q
+ Q.insert(0, Item)
+ break
+ if Q != []:
+ break
+ # DAG is done if there's no more incoming edge for all nodes
+ if Q == []:
+ break
+
+ # remove node from Q
+ Node = Q.pop()
+ # output Node
+ SortedLibraryList.append(Node)
+
+ # for each node Item with an edge e from Node to Item do
+ for Item in LibraryList:
+ if Node not in ConsumedByList[Item]:
+ continue
+ # remove edge e from the graph
+ ConsumedByList[Item].remove(Node)
+
+ if ConsumedByList[Item]:
+ continue
+ # insert Item into Q, if Item has no other incoming edges
+ Q.insert(0, Item)
+
+ #
+ # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
+ #
+ for Item in LibraryList:
+ if ConsumedByList[Item] and Item in Constructor and len(Constructor) > 1:
+ if not Module.LibraryClass:
+ ErrorMessage = "\tconsumed by " + "\n\tconsumed by ".join(str(L) for L in ConsumedByList[Item])
+ EdkLogger.error("build", BUILD_ERROR, 'Library [%s] with constructors has a cycle' % str(Item),
+ ExtraData=ErrorMessage, File=FileName)
+ else:
+ return []
+ if Item not in SortedLibraryList:
+ SortedLibraryList.append(Item)
+
+ #
+ # Build the list of constructor and destructor names
+ # The DAG topological sort produces the destructor order, so the list of constructors must be generated in reverse order
+ #
+ SortedLibraryList.reverse()
+ Module.LibInstances = SortedLibraryList
+ SortedLibraryList = [lib.SetReferenceModule(Module) for lib in SortedLibraryList]
+ return SortedLibraryList
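+
+# The ordering above is a topological sort over the "consumed by" graph. A
+# condensed sketch of the same idea, assuming a plain dict ConsumedBy that
+# maps each library to the set of modules/libraries using it:
+#
+#   Q = [Lib for Lib in Libs if not ConsumedBy[Lib]]   # no incoming edges
+#   Order = []
+#   while Q:
+#       Node = Q.pop()
+#       Order.append(Node)                             # destructor order
+#       for Item in Libs:                              # drop edges from Node
+#           if Node in ConsumedBy[Item]:
+#               ConsumedBy[Item].remove(Node)
+#               if not ConsumedBy[Item]:
+#                   Q.insert(0, Item)
+#   Order.reverse()                                    # constructor order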
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
new file mode 100755
index 00000000..0764e272
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -0,0 +1,204 @@
+## @file
+# This file is used to create a database used by build tool
+#
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+from Common.StringUtils import *
+from Common.DataType import *
+from Common.Misc import *
+from types import *
+
+from .MetaDataTable import *
+from .MetaFileTable import *
+from .MetaFileParser import *
+
+from Workspace.DecBuildData import DecBuildData
+from Workspace.DscBuildData import DscBuildData
+from Workspace.InfBuildData import InfBuildData
+
+## Database
+#
+# This class defines the build database for all modules, packages and platforms.
+# It will call the corresponding parser for a given file if that file cannot be
+# found in the database.
+#
+# @param DbPath Path of database file
+# @param GlobalMacros Global macros used for replacement during file parsing
+# @param RenewDb=False Whether to recreate the database file even if it already exists
+#
+class WorkspaceDatabase(object):
+
+ #
+ # internal class used to call the corresponding file parser and cache the result
+ # to avoid unnecessary re-parsing
+ #
+ class BuildObjectFactory(object):
+
+ _FILE_TYPE_ = {
+ ".inf" : MODEL_FILE_INF,
+ ".dec" : MODEL_FILE_DEC,
+ ".dsc" : MODEL_FILE_DSC,
+ }
+
+ # file parser
+ _FILE_PARSER_ = {
+ MODEL_FILE_INF : InfParser,
+ MODEL_FILE_DEC : DecParser,
+ MODEL_FILE_DSC : DscParser,
+ }
+
+ # convert to xxxBuildData object
+ _GENERATOR_ = {
+ MODEL_FILE_INF : InfBuildData,
+ MODEL_FILE_DEC : DecBuildData,
+ MODEL_FILE_DSC : DscBuildData,
+ }
+
+ _CACHE_ = {} # (FilePath, Arch) : <object>
+ def GetCache(self):
+ return self._CACHE_
+
+ # constructor
+ def __init__(self, WorkspaceDb):
+ self.WorkspaceDb = WorkspaceDb
+
+ # key = (FilePath, Arch=None)
+ def __contains__(self, Key):
+ FilePath = Key[0]
+ if len(Key) > 1:
+ Arch = Key[1]
+ else:
+ Arch = None
+ return (FilePath, Arch) in self._CACHE_
+
+ # key = (FilePath, Arch=None, Target=None, Toolchain=None)
+ def __getitem__(self, Key):
+ FilePath = Key[0]
+ KeyLength = len(Key)
+ if KeyLength > 1:
+ Arch = Key[1]
+ else:
+ Arch = None
+ if KeyLength > 2:
+ Target = Key[2]
+ else:
+ Target = None
+ if KeyLength > 3:
+ Toolchain = Key[3]
+ else:
+ Toolchain = None
+
+ # if it was generated before, just return the cached one
+ Key = (FilePath, Arch, Target, Toolchain)
+ if Key in self._CACHE_:
+ return self._CACHE_[Key]
+
+ # check file type
+ BuildObject = self.CreateBuildObject(FilePath, Arch, Target, Toolchain)
+ self._CACHE_[Key] = BuildObject
+ return BuildObject
+ def CreateBuildObject(self,FilePath, Arch, Target, Toolchain):
+ Ext = FilePath.Type
+ if Ext not in self._FILE_TYPE_:
+ return None
+ FileType = self._FILE_TYPE_[Ext]
+ if FileType not in self._GENERATOR_:
+ return None
+
+ # get the parser ready for this file
+ MetaFile = self._FILE_PARSER_[FileType](
+ FilePath,
+ FileType,
+ Arch,
+ MetaFileStorage(self.WorkspaceDb, FilePath, FileType)
+ )
+ # always do post-process, in case of macros change
+ MetaFile.DoPostProcess()
+ # the object the build is based on
+ BuildObject = self._GENERATOR_[FileType](
+ FilePath,
+ MetaFile,
+ self,
+ Arch,
+ Target,
+ Toolchain
+ )
+ return BuildObject
+
+ # placeholder for file format conversion
+ class TransformObjectFactory:
+ def __init__(self, WorkspaceDb):
+ self.WorkspaceDb = WorkspaceDb
+
+ # key = FilePath, Arch
+ def __getitem__(self, Key):
+ pass
+
+ ## Constructor of WorkspaceDatabase
+ #
+ # @param DbPath Path of database file
+ # @param GlobalMacros Global macros used for replacement during file parsing
+ # @param RenewDb=False Whether to recreate the database file even if it already exists
+ #
+ def __init__(self):
+ self.DB = dict()
+ # create table for internal uses
+ self.TblDataModel = DataClass.MODEL_LIST
+ self.TblFile = []
+ self.Platform = None
+
+ # conversion object for build or file format conversion purpose
+ self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self)
+ self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self)
+
+
+ ## Summarize all packages in the database
+ def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag):
+ self.Platform = Platform
+ PackageList = []
+ Pa = self.BuildObject[self.Platform, Arch, TargetName, ToolChainTag]
+ #
+ # Get Package related to Modules
+ #
+ for Module in Pa.Modules:
+ ModuleObj = self.BuildObject[Module, Arch, TargetName, ToolChainTag]
+ for Package in ModuleObj.Packages:
+ if Package not in PackageList:
+ PackageList.append(Package)
+ #
+ # Get Packages related to Libraries
+ #
+ for Lib in Pa.LibraryInstances:
+ LibObj = self.BuildObject[Lib, Arch, TargetName, ToolChainTag]
+ for Package in LibObj.Packages:
+ if Package not in PackageList:
+ PackageList.append(Package)
+ for Package in Pa.Packages:
+ if Package in PackageList:
+ continue
+ PackageList.append(Package)
+
+ return PackageList
+
+ def MapPlatform(self, Dscfile):
+ Platform = self.BuildObject[PathClass(Dscfile), TAB_COMMON]
+ if Platform is None:
+ EdkLogger.error('build', PARSER_ERROR, "Failed to parse DSC file: %s" % Dscfile)
+ return Platform
+
+BuildDB = WorkspaceDatabase()
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+
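+# Typical lookup sketch (paths are illustrative): the factory indexes by
+# (FilePath, Arch, Target, Toolchain) and caches the parsed build object:
+#
+#   Platform = BuildDB.BuildObject[PathClass('MyPkg/MyPkg.dsc'), 'X64', 'DEBUG', 'GCC5']
+#   Module = BuildDB.BuildObject[PathClass('MyPkg/Drv/Drv.inf'), 'X64', 'DEBUG', 'GCC5']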
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/__init__.py
new file mode 100644
index 00000000..4183055e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Workspace/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'Workspace' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as a package.
+#
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/basetool_tiano_python_path_env.yaml b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/basetool_tiano_python_path_env.yaml
new file mode 100644
index 00000000..f2a60b34
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/basetool_tiano_python_path_env.yaml
@@ -0,0 +1,11 @@
+## @file
+# Add this folder to the pypath so modules can be easily
+# loaded
+#
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+{
+ "scope": "global",
+ "flags": ["set_pypath"]
+}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/BuildReport.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/BuildReport.py
new file mode 100755
index 00000000..45edeabd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/BuildReport.py
@@ -0,0 +1,2316 @@
+## @file
+# Routines for generating build report.
+#
+# This module contains the functionality to generate the build report after
+# all build targets complete successfully.
+#
+# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+import Common.LongFilePathOs as os
+import re
+import platform
+import textwrap
+import traceback
+import sys
+import time
+import struct
+import hashlib
+import subprocess
+import threading
+from datetime import datetime
+from io import BytesIO
+from Common import EdkLogger
+from Common.Misc import SaveFileOnChange
+from Common.Misc import GuidStructureByteArrayToGuidString
+from Common.Misc import GuidStructureStringToGuidString
+from Common.BuildToolError import FILE_WRITE_FAILURE
+from Common.BuildToolError import CODE_ERROR
+from Common.BuildToolError import COMMAND_FAILURE
+from Common.BuildToolError import FORMAT_INVALID
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+import Common.GlobalData as GlobalData
+from AutoGen.ModuleAutoGen import ModuleAutoGen
+from Common.Misc import PathClass
+from Common.StringUtils import NormPath
+from Common.DataType import *
+import collections
+from Common.Expression import *
+from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID
+
+## Pattern to extract contents in EDK DXS files
+gDxsDependencyPattern = re.compile(r"DEPENDENCY_START(.+)DEPENDENCY_END", re.DOTALL)
+
+## Patterns to find the FV total size and occupied size in the flash report intermediate file
+gFvTotalSizePattern = re.compile(r"EFI_FV_TOTAL_SIZE = (0x[0-9a-fA-F]+)")
+gFvTakenSizePattern = re.compile(r"EFI_FV_TAKEN_SIZE = (0x[0-9a-fA-F]+)")
+
+## Pattern to find module size and time stamp in module summary report intermediate file
+gModuleSizePattern = re.compile(r"MODULE_SIZE = (\d+)")
+gTimeStampPattern = re.compile(r"TIME_STAMP = (\d+)")
+
+## Pattern to find GUID value in flash description files
+gPcdGuidPattern = re.compile(r"PCD\((\w+)[.](\w+)\)")
+
+## Pattern to collect offset, GUID value pair in the flash report intermediate file
+gOffsetGuidPattern = re.compile(r"(0x[0-9A-Fa-f]+) ([-A-Fa-f0-9]+)")
+
+## Pattern to find module base address and entry point in fixed flash map file
+gModulePattern = r"\n[-\w]+\s*\(([^,]+),\s*BaseAddress=%(Address)s,\s*EntryPoint=%(Address)s,\s*Type=\w+\)\s*\(GUID=([-0-9A-Fa-f]+)[^)]*\)"
+gMapFileItemPattern = re.compile(gModulePattern % {"Address" : "(-?0[xX][0-9A-Fa-f]+)"})
+
+## Pattern to find all module referenced header files in source files
+gIncludePattern = re.compile(r'#include\s*["<]([^">]+)[">]')
+gIncludePattern2 = re.compile(r"#include\s+EFI_([A-Z_]+)\s*[(]\s*(\w+)\s*[)]")
+
+## Pattern to find the entry point for EDK module using EDKII Glue library
+gGlueLibEntryPoint = re.compile(r"__EDKII_GLUE_MODULE_ENTRY_POINT__\s*=\s*(\w+)")
+
+## Tags for MaxLength of line in report
+gLineMaxLength = 120
+
+## Tags for end of line in report
+gEndOfLine = "\r\n"
+
+## Tags for section start, end and separator
+gSectionStart = ">" + "=" * (gLineMaxLength - 2) + "<"
+gSectionEnd = "<" + "=" * (gLineMaxLength - 2) + ">" + "\n"
+gSectionSep = "=" * gLineMaxLength
+
+## Tags for subsection start, end and separator
+gSubSectionStart = ">" + "-" * (gLineMaxLength - 2) + "<"
+gSubSectionEnd = "<" + "-" * (gLineMaxLength - 2) + ">"
+gSubSectionSep = "-" * gLineMaxLength
+
+
+## The look up table to map PCD type to pair of report display type and DEC type
+gPcdTypeMap = {
+ TAB_PCDS_FIXED_AT_BUILD : ('FIXED', TAB_PCDS_FIXED_AT_BUILD),
+ TAB_PCDS_PATCHABLE_IN_MODULE: ('PATCH', TAB_PCDS_PATCHABLE_IN_MODULE),
+ TAB_PCDS_FEATURE_FLAG : ('FLAG', TAB_PCDS_FEATURE_FLAG),
+ TAB_PCDS_DYNAMIC : ('DYN', TAB_PCDS_DYNAMIC),
+ TAB_PCDS_DYNAMIC_HII : ('DYNHII', TAB_PCDS_DYNAMIC),
+ TAB_PCDS_DYNAMIC_VPD : ('DYNVPD', TAB_PCDS_DYNAMIC),
+ TAB_PCDS_DYNAMIC_EX : ('DEX', TAB_PCDS_DYNAMIC_EX),
+ TAB_PCDS_DYNAMIC_EX_HII : ('DEXHII', TAB_PCDS_DYNAMIC_EX),
+ TAB_PCDS_DYNAMIC_EX_VPD : ('DEXVPD', TAB_PCDS_DYNAMIC_EX),
+ }
+
+## The look up table to map module type to driver type
+gDriverTypeMap = {
+ SUP_MODULE_SEC : '0x3 (SECURITY_CORE)',
+ SUP_MODULE_PEI_CORE : '0x4 (PEI_CORE)',
+ SUP_MODULE_PEIM : '0x6 (PEIM)',
+ SUP_MODULE_DXE_CORE : '0x5 (DXE_CORE)',
+ SUP_MODULE_DXE_DRIVER : '0x7 (DRIVER)',
+ SUP_MODULE_DXE_SAL_DRIVER : '0x7 (DRIVER)',
+ SUP_MODULE_DXE_SMM_DRIVER : '0x7 (DRIVER)',
+ SUP_MODULE_DXE_RUNTIME_DRIVER: '0x7 (DRIVER)',
+ SUP_MODULE_UEFI_DRIVER : '0x7 (DRIVER)',
+ SUP_MODULE_UEFI_APPLICATION : '0x9 (APPLICATION)',
+ SUP_MODULE_SMM_CORE : '0xD (SMM_CORE)',
+ 'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
+ SUP_MODULE_MM_STANDALONE : '0xE (MM_STANDALONE)',
+ SUP_MODULE_MM_CORE_STANDALONE : '0xF (MM_CORE_STANDALONE)'
+ }
+
+## The look up table of the supported opcode in the dependency expression binaries
+gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]
+
+## Save VPD Pcd
+VPDPcdList = []
+
+##
+# Writes a string to the file object.
+#
+# This function writes a string to the file object and appends a new line
+# afterwards. It may optionally wrap the string for better readability.
+#
+# @File The file object to write
+# @String The string to be written to the file
+# @Wrapper Indicates whether to wrap the string
+#
+def FileWrite(File, String, Wrapper=False):
+ if Wrapper:
+ String = textwrap.fill(String, 120)
+ File.append(String + gEndOfLine)
+
+def ByteArrayForamt(Value):
+ IsByteArray = False
+ SplitNum = 16
+ ArrayList = []
+ if Value.startswith('{') and Value.endswith('}') and not Value.startswith("{CODE("):
+ Value = Value[1:-1]
+ ValueList = Value.split(',')
+ if len(ValueList) >= SplitNum:
+ IsByteArray = True
+ if IsByteArray:
+ if ValueList:
+ Len = len(ValueList)//SplitNum   # integer division: number of full 16-byte chunks
+ for i, element in enumerate(ValueList):
+ ValueList[i] = '0x%02X' % int(element.strip(), 16)
+ if Len:
+ Id = 0
+ while (Id <= Len):
+ End = min(SplitNum*(Id+1), len(ValueList))
+ Str = ','.join(ValueList[SplitNum*Id : End])
+ if End == len(ValueList):
+ Str += '}'
+ ArrayList.append(Str)
+ break
+ else:
+ Str += ','
+ ArrayList.append(Str)
+ Id += 1
+ else:
+ ArrayList = [Value + '}']
+ return IsByteArray, ArrayList
+
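+# Behavior sketch for ByteArrayForamt: a value is treated as a byte array only
+# when it is a '{...}' list of at least 16 elements, and the output is split
+# into comma-terminated chunks of 16 bytes for the report:
+#
+#   IsByteArray, Lines = ByteArrayForamt('{0x1,0x2, ... ,0x11}')   # 17 elements
+#   # IsByteArray == True, Lines == ['0x01,0x02,...,0x10,', '0x11}']
+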
+##
+# Find all the header files that the module source directly includes.
+#
+# This function scans source code to find all header files the module may
+# include. This is not fully accurate, but it is very effective at finding
+# the header files a module might include via #include statements.
+#
+# @Source The source file name
+# @IncludePathList The list of include path to find the source file.
+# @IncludeFiles The dictionary of current found include files.
+#
+def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
+ FileContents = open(Source).read()
+ #
+ # Find header files with pattern #include "XXX.h" or #include <XXX.h>
+ #
+ for Match in gIncludePattern.finditer(FileContents):
+ FileName = Match.group(1).strip()
+ for Dir in [os.path.dirname(Source)] + IncludePathList:
+ FullFileName = os.path.normpath(os.path.join(Dir, FileName))
+ if os.path.exists(FullFileName):
+ IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
+ break
+
+ #
+ # Find header files with pattern like #include EFI_PPI_CONSUMER(XXX)
+ #
+ for Match in gIncludePattern2.finditer(FileContents):
+ Key = Match.group(2)
+ Type = Match.group(1)
+ if "ARCH_PROTOCOL" in Type:
+ FileName = "ArchProtocol/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ elif "PROTOCOL" in Type:
+ FileName = "Protocol/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ elif "PPI" in Type:
+ FileName = "Ppi/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ elif TAB_GUID in Type:
+ FileName = "Guid/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ else:
+ continue
+ for Dir in IncludePathList:
+ FullFileName = os.path.normpath(os.path.join(Dir, FileName))
+ if os.path.exists(FullFileName):
+ IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
+ break
+
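+# Usage sketch (paths are illustrative): IncludeFiles is filled in place,
+# keyed by the normalized lower-case path so duplicates collapse:
+#
+#   IncludeFiles = {}
+#   FindIncludeFiles('Driver.c', ['MdePkg/Include'], IncludeFiles)
+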
+## Split the lines in a file
+#
+# This method is used to split the lines in a file so that each line is
+# shorter than MaxLength.
+#
+# @param Content The content of file
+# @param MaxLength The Max Length of the line
+#
+def FileLinesSplit(Content=None, MaxLength=None):
+ ContentList = Content.split(TAB_LINE_BREAK)
+ NewContent = ''
+ NewContentList = []
+ for Line in ContentList:
+ while len(Line.rstrip()) > MaxLength:
+ LineSpaceIndex = Line.rfind(TAB_SPACE_SPLIT, 0, MaxLength)
+ LineSlashIndex = Line.rfind(TAB_SLASH, 0, MaxLength)
+ LineBackSlashIndex = Line.rfind(TAB_BACK_SLASH, 0, MaxLength)
+ if max(LineSpaceIndex, LineSlashIndex, LineBackSlashIndex) > 0:
+ LineBreakIndex = max(LineSpaceIndex, LineSlashIndex, LineBackSlashIndex)
+ else:
+ LineBreakIndex = MaxLength
+ NewContentList.append(Line[:LineBreakIndex])
+ Line = Line[LineBreakIndex:]
+ if Line:
+ NewContentList.append(Line)
+ for NewLine in NewContentList:
+ NewContent += NewLine + TAB_LINE_BREAK
+
+ NewContent = NewContent.replace(gEndOfLine, TAB_LINE_BREAK).replace('\r\r\n', gEndOfLine)
+ return NewContent
+
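+# Sketch: FileLinesSplit prefers to break a long line at the last space, '/'
+# or '\' found before MaxLength, and falls back to a hard break at MaxLength:
+#
+#   Text = FileLinesSplit('A' * 25 + ' ' + 'B' * 25, MaxLength=30)
+#   # broken at the space: first line is the 25 'A's, the rest follows
+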
+
+
+##
+# Parse binary dependency expression section
+#
+# This utility class parses the dependency expression section and translates
+# GUID values into readable GUID names.
+#
+class DepexParser(object):
+ ##
+ # Constructor function for class DepexParser
+ #
+ # This constructor function collects GUID values so that readable
+ # GUID names can be translated.
+ #
+ # @param self The object pointer
+ # @param Wa Workspace context information
+ #
+ def __init__(self, Wa):
+ self._GuidDb = {}
+ for Pa in Wa.AutoGenObjectList:
+ for Package in Pa.PackageList:
+ for Protocol in Package.Protocols:
+ GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])
+ self._GuidDb[GuidValue.upper()] = Protocol
+ for Ppi in Package.Ppis:
+ GuidValue = GuidStructureStringToGuidString(Package.Ppis[Ppi])
+ self._GuidDb[GuidValue.upper()] = Ppi
+ for Guid in Package.Guids:
+ GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])
+ self._GuidDb[GuidValue.upper()] = Guid
+ for Ma in Pa.ModuleAutoGenList:
+ for Pcd in Ma.FixedVoidTypePcds:
+ PcdValue = Ma.FixedVoidTypePcds[Pcd]
+ if len(PcdValue.split(',')) == 16:
+ GuidValue = GuidStructureByteArrayToGuidString(PcdValue)
+ self._GuidDb[GuidValue.upper()] = Pcd
+ ##
+ # Parse the binary dependency expression files.
+ #
+ # This function parses the binary dependency expression file and
+ # translates it into an instruction list.
+ #
+ # @param self The object pointer
+ # @param DepexFileName The file name of binary dependency expression file.
+ #
+ def ParseDepexFile(self, DepexFileName):
+ DepexFile = open(DepexFileName, "rb")
+ DepexStatement = []
+ OpCode = DepexFile.read(1)
+ while OpCode:
+ Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
+ if Statement in ["BEFORE", "AFTER", "PUSH"]:
+ GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
+ struct.unpack(PACK_PATTERN_GUID, DepexFile.read(16))
+ GuidString = self._GuidDb.get(GuidValue, GuidValue)
+ Statement = "%s %s" % (Statement, GuidString)
+ DepexStatement.append(Statement)
+ OpCode = DepexFile.read(1)
+
+ return DepexStatement
+
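+# Format sketch: a binary depex is a stream of one-byte opcodes, where
+# BEFORE/AFTER/PUSH are each followed by a 16-byte GUID that is translated
+# back to a name via the collected GUID database:
+#
+#   Statements = DepexParser(Wa).ParseDepexFile('Driver.depex')
+#   # e.g. ['PUSH gEfiPcdProtocolGuid', 'END']   (names are illustrative)
+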
+##
+# Reports library information
+#
+# This class reports the module library subsection in the build report file.
+#
+class LibraryReport(object):
+ ##
+ # Constructor function for class LibraryReport
+ #
+ # This constructor function generates LibraryReport object for
+ # a module.
+ #
+ # @param self The object pointer
+ # @param M Module context information
+ #
+ def __init__(self, M):
+ self.LibraryList = []
+
+ for Lib in M.DependentLibraryList:
+ LibInfPath = str(Lib)
+ LibClassList = Lib.LibraryClass[0].LibraryClass
+ LibConstructorList = Lib.ConstructorList
+ LibDestructorList = Lib.DestructorList
+ LibDepexList = Lib.DepexExpression[M.Arch, M.ModuleType]
+ for LibAutoGen in M.LibraryAutoGenList:
+ if LibInfPath == LibAutoGen.MetaFile.Path:
+ LibTime = LibAutoGen.BuildTime
+ break
+ self.LibraryList.append((LibInfPath, LibClassList, LibConstructorList, LibDestructorList, LibDepexList, LibTime))
+
+ ##
+ # Generate report for module library information
+ #
+ # This function generates report for the module library.
+ # If the module is an EDKII-style one, the additional library class,
+ # library constructor/destructor and dependency expression may also be reported.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ #
+ def GenerateReport(self, File):
+ if len(self.LibraryList) > 0:
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, TAB_BRG_LIBRARY)
+ FileWrite(File, gSubSectionSep)
+ for LibraryItem in self.LibraryList:
+ LibInfPath = LibraryItem[0]
+ FileWrite(File, LibInfPath)
+
+ LibClass = LibraryItem[1]
+ EdkIILibInfo = ""
+ LibConstructor = " ".join(LibraryItem[2])
+ if LibConstructor:
+ EdkIILibInfo += " C = " + LibConstructor
+ LibDestructor = " ".join(LibraryItem[3])
+ if LibDestructor:
+ EdkIILibInfo += " D = " + LibDestructor
+ LibDepex = " ".join(LibraryItem[4])
+ if LibDepex:
+ EdkIILibInfo += " Depex = " + LibDepex
+ if LibraryItem[5]:
+ EdkIILibInfo += " Time = " + LibraryItem[5]
+ if EdkIILibInfo:
+ FileWrite(File, "{%s: %s}" % (LibClass, EdkIILibInfo))
+ else:
+ FileWrite(File, "{%s}" % LibClass)
+
+ FileWrite(File, gSubSectionEnd)
+
+##
+# Reports dependency expression information
+#
+# This class reports the module dependency expression subsection in the build report file.
+#
+class DepexReport(object):
+ ##
+ # Constructor function for class DepexReport
+ #
+ # This constructor function generates DepexReport object for
+ # a module. If the module source contains a DXS file (usually an EDK-
+ # style module), it uses the dependency in the DXS file; otherwise,
+ # it uses the dependency expression from its own INF [Depex] section
+ # and then merges it with the ones from its dependent libraries' INFs.
+ #
+ # @param self The object pointer
+ # @param M Module context information
+ #
+ def __init__(self, M):
+ self.Depex = ""
+ self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")
+ ModuleType = M.ModuleType
+ if not ModuleType:
+ ModuleType = COMPONENT_TO_MODULE_MAP_DICT.get(M.ComponentType, "")
+
+ if ModuleType in [SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE, SUP_MODULE_UEFI_APPLICATION]:
+ return
+
+ for Source in M.SourceFileList:
+ if os.path.splitext(Source.Path)[1].lower() == ".dxs":
+ Match = gDxsDependencyPattern.search(open(Source.Path).read())
+ if Match:
+ self.Depex = Match.group(1).strip()
+ self.Source = "DXS"
+ break
+ else:
+ self.Depex = M.DepexExpressionDict.get(M.ModuleType, "")
+ self.ModuleDepex = " ".join(M.Module.DepexExpression[M.Arch, M.ModuleType])
+ if not self.ModuleDepex:
+ self.ModuleDepex = "(None)"
+
+ LibDepexList = []
+ for Lib in M.DependentLibraryList:
+ LibDepex = " ".join(Lib.DepexExpression[M.Arch, M.ModuleType]).strip()
+ if LibDepex != "":
+ LibDepexList.append("(" + LibDepex + ")")
+ self.LibraryDepex = " AND ".join(LibDepexList)
+ if not self.LibraryDepex:
+ self.LibraryDepex = "(None)"
+ self.Source = "INF"
+
+ ##
+ # Generate report for module dependency expression information
+ #
+ # This function generates report for the module dependency expression.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param GlobalDepexParser The platform global Dependency expression parser object
+ #
+ def GenerateReport(self, File, GlobalDepexParser):
+ if not self.Depex:
+ return
+ FileWrite(File, gSubSectionStart)
+ if os.path.isfile(self._DepexFileName):
+ try:
+ DepexStatements = GlobalDepexParser.ParseDepexFile(self._DepexFileName)
+ FileWrite(File, "Final Dependency Expression (DEPEX) Instructions")
+ for DepexStatement in DepexStatements:
+ FileWrite(File, " %s" % DepexStatement)
+ FileWrite(File, gSubSectionSep)
+ except:
+ EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)
+
+ FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
+
+ if self.Source == "INF":
+ FileWrite(File, self.Depex, True)
+ FileWrite(File, gSubSectionSep)
+ FileWrite(File, "From Module INF: %s" % self.ModuleDepex, True)
+ FileWrite(File, "From Library INF: %s" % self.LibraryDepex, True)
+ else:
+ FileWrite(File, self.Depex)
+ FileWrite(File, gSubSectionEnd)
+
+##
+# Reports module build flags information
+#
+# This class reports the module build flags subsection in the build report file.
+#
+class BuildFlagsReport(object):
+ ##
+ # Constructor function for class BuildFlagsReport
+ #
+ # This constructor function generates BuildFlagsReport object for
+ # a module. It reports the build tool chain tag and all relevant
+ # build flags to build the module.
+ #
+ # @param self The object pointer
+ # @param M Module context information
+ #
+ def __init__(self, M):
+ BuildOptions = {}
+ #
+ # Add build flags according to source file extension so that
+ # irrelevant ones can be filtered out.
+ #
+ for Source in M.SourceFileList:
+ Ext = os.path.splitext(Source.File)[1].lower()
+ if Ext in [".c", ".cc", ".cpp"]:
+ BuildOptions["CC"] = 1
+ elif Ext in [".s", ".asm"]:
+ BuildOptions["PP"] = 1
+ BuildOptions["ASM"] = 1
+ elif Ext in [".vfr"]:
+ BuildOptions["VFRPP"] = 1
+ BuildOptions["VFR"] = 1
+ elif Ext in [".dxs"]:
+ BuildOptions["APP"] = 1
+ BuildOptions["CC"] = 1
+ elif Ext in [".asl"]:
+ BuildOptions["ASLPP"] = 1
+ BuildOptions["ASL"] = 1
+ elif Ext in [".aslc"]:
+ BuildOptions["ASLCC"] = 1
+ BuildOptions["ASLDLINK"] = 1
+ BuildOptions["CC"] = 1
+ elif Ext in [".asm16"]:
+ BuildOptions["ASMLINK"] = 1
+ BuildOptions["SLINK"] = 1
+ BuildOptions["DLINK"] = 1
+
+ #
+ # Save module build flags.
+ #
+ self.ToolChainTag = M.ToolChain
+ self.BuildFlags = {}
+ for Tool in BuildOptions:
+ self.BuildFlags[Tool + "_FLAGS"] = M.BuildOption.get(Tool, {}).get("FLAGS", "")
+
+ ##
+ # Generate report for module build flags information
+ #
+ # This function generates report for the module build flags expression.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ #
+ def GenerateReport(self, File):
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, "Build Flags")
+ FileWrite(File, "Tool Chain Tag: %s" % self.ToolChainTag)
+ for Tool in self.BuildFlags:
+ FileWrite(File, gSubSectionSep)
+ FileWrite(File, "%s = %s" % (Tool, self.BuildFlags[Tool]), True)
+
+ FileWrite(File, gSubSectionEnd)
+
+
+##
+# Reports individual module information
+#
+# This class reports the module section in the build report file.
+# It comprises the module summary, module PCD, library, dependency expression,
+# and build flags sections.
+#
+class ModuleReport(object):
+ ##
+ # Constructor function for class ModuleReport
+ #
+ # This constructor function generates ModuleReport object for
+ # a separate module in a platform build.
+ #
+ # @param self The object pointer
+ # @param M Module context information
+ # @param ReportType The kind of report items in the final report file
+ #
+ def __init__(self, M, ReportType):
+ self.ModuleName = M.Module.BaseName
+ self.ModuleInfPath = M.MetaFile.File
+ self.ModuleArch = M.Arch
+ self.FileGuid = M.Guid
+ self.Size = 0
+ self.BuildTimeStamp = None
+ self.Hash = 0
+ self.DriverType = ""
+ if not M.IsLibrary:
+ ModuleType = M.ModuleType
+ if not ModuleType:
+ ModuleType = COMPONENT_TO_MODULE_MAP_DICT.get(M.ComponentType, "")
+ #
+ # If a module complies with PI 1.1, promote its module type to "SMM_DRIVER"
+ #
+ if ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
+ PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000")
+ if int(PiSpec, 0) >= 0x0001000A:
+ ModuleType = "SMM_DRIVER"
+ self.DriverType = gDriverTypeMap.get(ModuleType, "0x2 (FREE_FORM)")
+ self.UefiSpecVersion = M.Module.Specification.get("UEFI_SPECIFICATION_VERSION", "")
+ self.PiSpecVersion = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "")
+ self.PciDeviceId = M.Module.Defines.get("PCI_DEVICE_ID", "")
+ self.PciVendorId = M.Module.Defines.get("PCI_VENDOR_ID", "")
+ self.PciClassCode = M.Module.Defines.get("PCI_CLASS_CODE", "")
+ self.BuildTime = M.BuildTime
+
+ self._BuildDir = M.BuildDir
+ self.ModulePcdSet = {}
+ if "PCD" in ReportType:
+ #
+ # Collect the set of all PCDs used by the module: those the module INF
+ # references directly or indirectly. Also save the module INF default
+ # values for them, in case they exist.
+ #
+ for Pcd in M.ModulePcdList + M.LibraryPcdList:
+ self.ModulePcdSet.setdefault((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Type), (Pcd.InfDefaultValue, Pcd.DefaultValue))
+
+ self.LibraryReport = None
+ if "LIBRARY" in ReportType:
+ self.LibraryReport = LibraryReport(M)
+
+ self.DepexReport = None
+ if "DEPEX" in ReportType:
+ self.DepexReport = DepexReport(M)
+
+ if "BUILD_FLAGS" in ReportType:
+ self.BuildFlagsReport = BuildFlagsReport(M)
+
+
+ ##
+ # Generate report for module information
+ #
+ # This function generates the report for an individual module
+ # in a platform build.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param GlobalPcdReport The platform global PCD report object
+ # @param GlobalPredictionReport The platform global Prediction report object
+ # @param GlobalDepexParser The platform global Dependency expression parser object
+ # @param ReportType The kind of report items in the final report file
+ #
+ def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, GlobalDepexParser, ReportType):
+ FileWrite(File, gSectionStart)
+
+ FwReportFileName = os.path.join(self._BuildDir, "OUTPUT", self.ModuleName + ".txt")
+ if os.path.isfile(FwReportFileName):
+ try:
+ FileContents = open(FwReportFileName).read()
+ Match = gModuleSizePattern.search(FileContents)
+ if Match:
+ self.Size = int(Match.group(1))
+
+ Match = gTimeStampPattern.search(FileContents)
+ if Match:
+ self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))
+ except IOError:
+ EdkLogger.warn(None, "Fail to read report file", FwReportFileName)
+
+ if "HASH" in ReportType:
+ OutputDir = os.path.join(self._BuildDir, "OUTPUT")
+ DefaultEFIfile = os.path.join(OutputDir, self.ModuleName + ".efi")
+ if os.path.isfile(DefaultEFIfile):
+ Tempfile = os.path.join(OutputDir, self.ModuleName + "_hash.tmp")
+ # rebase the efi image since its base address may not zero
+ cmd = ["GenFw", "--rebase", str(0), "-o", Tempfile, DefaultEFIfile]
+ try:
+ PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ except Exception as X:
+ EdkLogger.error("GenFw", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
+ EndOfProcedure = threading.Event()
+ EndOfProcedure.clear()
+ if PopenObject.stderr:
+ StdErrThread = threading.Thread(target=ReadMessage, args=(PopenObject.stderr, EdkLogger.quiet, EndOfProcedure))
+ StdErrThread.setName("STDERR-Redirector")
+ StdErrThread.setDaemon(False)
+ StdErrThread.start()
+ # waiting for program exit
+ PopenObject.wait()
+ if PopenObject.stderr:
+ StdErrThread.join()
+ if PopenObject.returncode != 0:
+ EdkLogger.error("GenFw", COMMAND_FAILURE, "Failed to generate firmware hash image for %s" % (DefaultEFIfile))
+ if os.path.isfile(Tempfile):
+ self.Hash = hashlib.sha1()
+ buf = open(Tempfile, 'rb').read()
+ self.Hash.update(buf)   # hashlib's update() returns None; just feed the buffer once
+ self.Hash = self.Hash.hexdigest()
+ os.remove(Tempfile)
+
+ FileWrite(File, "Module Summary")
+ FileWrite(File, "Module Name: %s" % self.ModuleName)
+ FileWrite(File, "Module Arch: %s" % self.ModuleArch)
+ FileWrite(File, "Module INF Path: %s" % self.ModuleInfPath)
+ FileWrite(File, "File GUID: %s" % self.FileGuid)
+ if self.Size:
+ FileWrite(File, "Size: 0x%X (%.2fK)" % (self.Size, self.Size / 1024.0))
+ if self.Hash:
+ FileWrite(File, "SHA1 HASH: %s *%s" % (self.Hash, self.ModuleName + ".efi"))
+ if self.BuildTimeStamp:
+ FileWrite(File, "Build Time Stamp: %s" % self.BuildTimeStamp)
+ if self.BuildTime:
+ FileWrite(File, "Module Build Time: %s" % self.BuildTime)
+ if self.DriverType:
+ FileWrite(File, "Driver Type: %s" % self.DriverType)
+ if self.UefiSpecVersion:
+ FileWrite(File, "UEFI Spec Version: %s" % self.UefiSpecVersion)
+ if self.PiSpecVersion:
+ FileWrite(File, "PI Spec Version: %s" % self.PiSpecVersion)
+ if self.PciDeviceId:
+ FileWrite(File, "PCI Device ID: %s" % self.PciDeviceId)
+ if self.PciVendorId:
+ FileWrite(File, "PCI Vendor ID: %s" % self.PciVendorId)
+ if self.PciClassCode:
+ FileWrite(File, "PCI Class Code: %s" % self.PciClassCode)
+
+ FileWrite(File, gSectionSep)
+
+ if "PCD" in ReportType:
+ GlobalPcdReport.GenerateReport(File, self.ModulePcdSet,self.FileGuid)
+
+ if "LIBRARY" in ReportType:
+ self.LibraryReport.GenerateReport(File)
+
+ if "DEPEX" in ReportType:
+ self.DepexReport.GenerateReport(File, GlobalDepexParser)
+
+ if "BUILD_FLAGS" in ReportType:
+ self.BuildFlagsReport.GenerateReport(File)
+
+ if "FIXED_ADDRESS" in ReportType and self.FileGuid:
+ GlobalPredictionReport.GenerateReport(File, self.FileGuid)
+
+ FileWrite(File, gSectionEnd)
+
+def ReadMessage(From, To, ExitFlag):
+ while True:
+ # read one line a time
+ Line = From.readline()
+ # empty string means "end"
+ if Line is not None and Line != b"":
+ To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
+ else:
+ break
+ if ExitFlag.isSet():
+ break
+
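+# ReadMessage is used as a thread target to drain a subprocess pipe line by
+# line into a logger callable until EOF or until ExitFlag is set (sketch;
+# Proc is an assumed subprocess.Popen object):
+#
+#   Flag = threading.Event()
+#   T = threading.Thread(target=ReadMessage, args=(Proc.stderr, EdkLogger.quiet, Flag))
+#   T.start(); Proc.wait(); Flag.set(); T.join()
+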
+##
+# Reports platform and module PCD information
+#
+# This class reports the platform PCD section and module PCD subsection
+# in the build report file.
+#
+class PcdReport(object):
+ ##
+ # Constructor function for class PcdReport
+ #
+ # This constructor function generates a PcdReport object for a platform build.
+ # It collects the whole PCD database from platform DSC files, platform
+ # flash description file and package DEC files.
+ #
+ # @param self The object pointer
+ # @param Wa Workspace context information
+ #
+ def __init__(self, Wa):
+ self.AllPcds = {}
+ self.UnusedPcds = {}
+ self.ConditionalPcds = {}
+ self.MaxLen = 0
+ self.Arch = None
+ if Wa.FdfProfile:
+ self.FdfPcdSet = Wa.FdfProfile.PcdDict
+ else:
+ self.FdfPcdSet = {}
+
+ self.DefaultStoreSingle = True
+ self.SkuSingle = True
+ if GlobalData.gDefaultStores and len(GlobalData.gDefaultStores) > 1:
+ self.DefaultStoreSingle = False
+ if GlobalData.gSkuids and len(GlobalData.gSkuids) > 1:
+ self.SkuSingle = False
+
+ self.ModulePcdOverride = {}
+ for Pa in Wa.AutoGenObjectList:
+ self.Arch = Pa.Arch
+ #
+ # Collect all platform-referenced PCDs and group them by PCD token space
+ # GUID C name
+ #
+ for Pcd in Pa.AllPcdList:
+ PcdList = self.AllPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
+ if Pcd not in PcdList:
+ PcdList.append(Pcd)
+ if len(Pcd.TokenCName) > self.MaxLen:
+ self.MaxLen = len(Pcd.TokenCName)
+ #
+ # Collect the PCD defined in DSC/FDF file, but not used in module
+ #
+ UnusedPcdFullList = []
+ StructPcdDict = GlobalData.gStructurePcd.get(self.Arch, collections.OrderedDict())
+ for Name, Guid in StructPcdDict:
+ if (Name, Guid) not in Pa.Platform.Pcds:
+ Pcd = StructPcdDict[(Name, Guid)]
+ PcdList = self.AllPcds.setdefault(Guid, {}).setdefault(Pcd.Type, [])
+ if Pcd not in PcdList and Pcd not in UnusedPcdFullList:
+ UnusedPcdFullList.append(Pcd)
+ for item in Pa.Platform.Pcds:
+ Pcd = Pa.Platform.Pcds[item]
+ if not Pcd.Type:
+ # for a PCD from the FDF file, first check whether it is used in any module
+ for T in PCD_TYPE_LIST:
+ PcdList = self.AllPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(T, [])
+ if Pcd in PcdList:
+ Pcd.Type = T
+ break
+ if not Pcd.Type:
+ PcdTypeFlag = False
+ for package in Pa.PackageList:
+ for T in PCD_TYPE_LIST:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, T) in package.Pcds:
+ Pcd.Type = T
+ PcdTypeFlag = True
+ if not Pcd.DatumType:
+ Pcd.DatumType = package.Pcds[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName, T)].DatumType
+ break
+ if PcdTypeFlag:
+ break
+ if not Pcd.DatumType:
+ PcdType = Pcd.Type
+ # Try to remove Hii and Vpd suffix
+ if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
+ PcdType = TAB_PCDS_DYNAMIC_EX
+ elif PcdType.startswith(TAB_PCDS_DYNAMIC):
+ PcdType = TAB_PCDS_DYNAMIC
+ for package in Pa.PackageList:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, PcdType) in package.Pcds:
+ Pcd.DatumType = package.Pcds[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName, PcdType)].DatumType
+ break
+
+ PcdList = self.AllPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
+ UnusedPcdList = self.UnusedPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
+ if Pcd in UnusedPcdList:
+ UnusedPcdList.remove(Pcd)
+ if Pcd not in PcdList and Pcd not in UnusedPcdFullList:
+ UnusedPcdFullList.append(Pcd)
+ if len(Pcd.TokenCName) > self.MaxLen:
+ self.MaxLen = len(Pcd.TokenCName)
+
+ if GlobalData.gConditionalPcds:
+ for PcdItem in GlobalData.gConditionalPcds:
+ if '.' in PcdItem:
+ (TokenSpaceGuidCName, TokenCName) = PcdItem.split('.')
+ if (TokenCName, TokenSpaceGuidCName) in Pa.Platform.Pcds:
+ Pcd = Pa.Platform.Pcds[(TokenCName, TokenSpaceGuidCName)]
+ PcdList = self.ConditionalPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
+ if Pcd not in PcdList:
+ PcdList.append(Pcd)
+
+ UnusedPcdList = []
+ if UnusedPcdFullList:
+ for Pcd in UnusedPcdFullList:
+ if Pcd.TokenSpaceGuidCName + '.' + Pcd.TokenCName in GlobalData.gConditionalPcds:
+ continue
+ UnusedPcdList.append(Pcd)
+
+ for Pcd in UnusedPcdList:
+ PcdList = self.UnusedPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
+ if Pcd not in PcdList:
+ PcdList.append(Pcd)
+
+ for Module in Pa.Platform.Modules.values():
+ #
+ # Collect module override PCDs
+ #
+ for ModulePcd in Module.M.ModulePcdList + Module.M.LibraryPcdList:
+ TokenCName = ModulePcd.TokenCName
+ TokenSpaceGuid = ModulePcd.TokenSpaceGuidCName
+ ModuleDefault = ModulePcd.DefaultValue
+ ModulePath = os.path.basename(Module.M.MetaFile.File)
+ self.ModulePcdOverride.setdefault((TokenCName, TokenSpaceGuid), {})[ModulePath] = ModuleDefault
+
+
+ #
+ # Collect PCD DEC default value.
+ #
+ self.DecPcdDefault = {}
+ self._GuidDict = {}
+ for Pa in Wa.AutoGenObjectList:
+ for Package in Pa.PackageList:
+ Guids = Package.Guids
+ self._GuidDict.update(Guids)
+ for (TokenCName, TokenSpaceGuidCName, DecType) in Package.Pcds:
+ DecDefaultValue = Package.Pcds[TokenCName, TokenSpaceGuidCName, DecType].DefaultValue
+ self.DecPcdDefault.setdefault((TokenCName, TokenSpaceGuidCName, DecType), DecDefaultValue)
+ #
+ # Collect PCDs defined in DSC common section
+ #
+ self.DscPcdDefault = {}
+ for Pa in Wa.AutoGenObjectList:
+ for (TokenCName, TokenSpaceGuidCName) in Pa.Platform.Pcds:
+ DscDefaultValue = Pa.Platform.Pcds[(TokenCName, TokenSpaceGuidCName)].DscDefaultValue
+ if DscDefaultValue:
+ self.DscPcdDefault[(TokenCName, TokenSpaceGuidCName)] = DscDefaultValue
+
+ def GenerateReport(self, File, ModulePcdSet, ModuleGuid=None):
+ if not ModulePcdSet:
+ if self.ConditionalPcds:
+ self.GenerateReportDetail(File, ModulePcdSet, 1)
+ if self.UnusedPcds:
+ IsEmpty = True
+ for Token in self.UnusedPcds:
+ TokenDict = self.UnusedPcds[Token]
+ for Type in TokenDict:
+ if TokenDict[Type]:
+ IsEmpty = False
+ break
+ if not IsEmpty:
+ break
+ if not IsEmpty:
+ self.GenerateReportDetail(File, ModulePcdSet, 2)
+ self.GenerateReportDetail(File, ModulePcdSet, ModuleGuid=ModuleGuid)
+
+ ##
+ # Generate report for PCD information
+ #
+ # This function generates the detailed PCD report section for a
+ # platform build or for an individual module.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param ModulePcdSet Set of all PCDs referenced by module or None for
+ # platform PCD report
+    # @param ReportSubType    0 means platform/module PCD report, 1 means conditional
+    #                         directives section report, 2 means unused PCDs section report
+    # @param ModuleGuid       The module GUID used to look up module-scoped overrides
+ #
+    def GenerateReportDetail(self, File, ModulePcdSet, ReportSubType=0, ModuleGuid=None):
+ PcdDict = self.AllPcds
+ if ReportSubType == 1:
+ PcdDict = self.ConditionalPcds
+ elif ReportSubType == 2:
+ PcdDict = self.UnusedPcds
+
+ if not ModulePcdSet:
+ FileWrite(File, gSectionStart)
+ if ReportSubType == 1:
+ FileWrite(File, "Conditional Directives used by the build system")
+ elif ReportSubType == 2:
+ FileWrite(File, "PCDs not used by modules or in conditional directives")
+ else:
+ FileWrite(File, "Platform Configuration Database Report")
+
+ FileWrite(File, " *B - PCD override in the build option")
+ FileWrite(File, " *P - Platform scoped PCD override in DSC file")
+ FileWrite(File, " *F - Platform scoped PCD override in FDF file")
+ if not ReportSubType:
+ FileWrite(File, " *M - Module scoped PCD override")
+ FileWrite(File, gSectionSep)
+ else:
+ if not ReportSubType and ModulePcdSet:
+ #
+ # For module PCD sub-section
+ #
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, TAB_BRG_PCD)
+ FileWrite(File, gSubSectionSep)
+ AllPcdDict = {}
+ for Key in PcdDict:
+ AllPcdDict[Key] = {}
+ for Type in PcdDict[Key]:
+ for Pcd in PcdDict[Key][Type]:
+ AllPcdDict[Key][(Pcd.TokenCName, Type)] = Pcd
+ for Key in sorted(AllPcdDict):
+ #
+ # Group PCD by their token space GUID C Name
+ #
+ First = True
+ for PcdTokenCName, Type in sorted(AllPcdDict[Key]):
+ #
+ # Group PCD by their usage type
+ #
+ Pcd = AllPcdDict[Key][(PcdTokenCName, Type)]
+ TypeName, DecType = gPcdTypeMap.get(Type, ("", Type))
+ MixedPcdFlag = False
+ if GlobalData.MixedPcd:
+ for PcdKey in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdKey]:
+ PcdTokenCName = PcdKey[0]
+ MixedPcdFlag = True
+ if MixedPcdFlag and not ModulePcdSet:
+ continue
+ #
+ # Get PCD default value and their override relationship
+ #
+ DecDefaultValue = self.DecPcdDefault.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, DecType))
+ DscDefaultValue = self.DscPcdDefault.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
+ DscDefaultValBak = DscDefaultValue
+ Field = ''
+ for (CName, Guid, Field) in self.FdfPcdSet:
+ if CName == PcdTokenCName and Guid == Key:
+ DscDefaultValue = self.FdfPcdSet[(CName, Guid, Field)]
+ break
+ if DscDefaultValue != DscDefaultValBak:
+ try:
+ DscDefaultValue = ValueExpressionEx(DscDefaultValue, Pcd.DatumType, self._GuidDict)(True)
+ except BadExpression as DscDefaultValue:
+ EdkLogger.error('BuildReport', FORMAT_INVALID, "PCD Value: %s, Type: %s" %(DscDefaultValue, Pcd.DatumType))
+
+ InfDefaultValue = None
+
+ PcdValue = DecDefaultValue
+ if DscDefaultValue:
+ PcdValue = DscDefaultValue
+                    # The DefaultValue of a StructurePcd is already up to date; no need to update it.
+ if not self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
+ Pcd.DefaultValue = PcdValue
+ PcdComponentValue = None
+ if ModulePcdSet is not None:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type) not in ModulePcdSet:
+ continue
+ InfDefaultValue, PcdComponentValue = ModulePcdSet[Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type]
+ PcdValue = PcdComponentValue
+                    # The DefaultValue of a StructurePcd is already up to date; no need to update it.
+ if not self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
+ Pcd.DefaultValue = PcdValue
+ if InfDefaultValue:
+ try:
+ InfDefaultValue = ValueExpressionEx(InfDefaultValue, Pcd.DatumType, self._GuidDict)(True)
+ except BadExpression as InfDefaultValue:
+ EdkLogger.error('BuildReport', FORMAT_INVALID, "PCD Value: %s, Type: %s" % (InfDefaultValue, Pcd.DatumType))
+ if InfDefaultValue == "":
+ InfDefaultValue = None
+
+ BuildOptionMatch = False
+ if GlobalData.BuildOptionPcd:
+ for pcd in GlobalData.BuildOptionPcd:
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) == (pcd[0], pcd[1]):
+ if pcd[2]:
+ continue
+ PcdValue = pcd[3]
+                            # The DefaultValue of a StructurePcd is already up to date; no need to update it.
+ if not self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
+ Pcd.DefaultValue = PcdValue
+ BuildOptionMatch = True
+ break
+
+ if First:
+ if ModulePcdSet is None:
+ FileWrite(File, "")
+ FileWrite(File, Key)
+ First = False
+
+
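+                # Leading zeros are stripped before calling int(x, 0) because
+                # Python 3 rejects base-0 literals such as '0010'; e.g. '0010'
+                # is reduced to '10' before the numeric comparisons below.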
+ if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ if PcdValue.startswith('0') and not PcdValue.lower().startswith('0x') and \
+ len(PcdValue) > 1 and PcdValue.lstrip('0'):
+ PcdValue = PcdValue.lstrip('0')
+ PcdValueNumber = int(PcdValue.strip(), 0)
+ if DecDefaultValue is None:
+ DecMatch = True
+ else:
+ if DecDefaultValue.startswith('0') and not DecDefaultValue.lower().startswith('0x') and \
+ len(DecDefaultValue) > 1 and DecDefaultValue.lstrip('0'):
+ DecDefaultValue = DecDefaultValue.lstrip('0')
+ DecDefaultValueNumber = int(DecDefaultValue.strip(), 0)
+ DecMatch = (DecDefaultValueNumber == PcdValueNumber)
+
+ if InfDefaultValue is None:
+ InfMatch = True
+ else:
+ if InfDefaultValue.startswith('0') and not InfDefaultValue.lower().startswith('0x') and \
+ len(InfDefaultValue) > 1 and InfDefaultValue.lstrip('0'):
+ InfDefaultValue = InfDefaultValue.lstrip('0')
+ InfDefaultValueNumber = int(InfDefaultValue.strip(), 0)
+ InfMatch = (InfDefaultValueNumber == PcdValueNumber)
+
+ if DscDefaultValue is None:
+ DscMatch = True
+ else:
+ if DscDefaultValue.startswith('0') and not DscDefaultValue.lower().startswith('0x') and \
+ len(DscDefaultValue) > 1 and DscDefaultValue.lstrip('0'):
+ DscDefaultValue = DscDefaultValue.lstrip('0')
+ DscDefaultValueNumber = int(DscDefaultValue.strip(), 0)
+ DscMatch = (DscDefaultValueNumber == PcdValueNumber)
+ else:
+ if DecDefaultValue is None:
+ DecMatch = True
+ else:
+ DecMatch = (DecDefaultValue.strip() == PcdValue.strip())
+
+ if InfDefaultValue is None:
+ InfMatch = True
+ else:
+ InfMatch = (InfDefaultValue.strip() == PcdValue.strip())
+
+ if DscDefaultValue is None:
+ DscMatch = True
+ else:
+ DscMatch = (DscDefaultValue.strip() == PcdValue.strip())
+
+ IsStructure = False
+ if self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
+ IsStructure = True
+ if TypeName in ('DYNVPD', 'DEXVPD'):
+ SkuInfoList = Pcd.SkuInfoList
+ Pcd = GlobalData.gStructurePcd[self.Arch][(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
+ if ModulePcdSet and ModulePcdSet.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type)):
+ InfDefaultValue, PcdComponentValue = ModulePcdSet[Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type]
+ DscDefaultValBak = Pcd.DefaultValue
+ Pcd.DefaultValue = PcdComponentValue
+
+ Pcd.DatumType = Pcd.StructName
+ if TypeName in ('DYNVPD', 'DEXVPD'):
+ Pcd.SkuInfoList = SkuInfoList
+ if Pcd.PcdValueFromComm or Pcd.PcdFieldValueFromComm:
+ BuildOptionMatch = True
+ DecMatch = False
+ elif Pcd.PcdValueFromFdf or Pcd.PcdFieldValueFromFdf:
+ DscDefaultValue = True
+ DscMatch = True
+ DecMatch = False
+ else:
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET | PCD_DYNAMIC_EX_TYPE_SET:
+ DscOverride = False
+ if Pcd.DefaultFromDSC:
+ DscOverride = True
+ else:
+ DictLen = 0
+ for item in Pcd.SkuOverrideValues:
+ DictLen += len(Pcd.SkuOverrideValues[item])
+ if not DictLen:
+ DscOverride = False
+ else:
+ if not Pcd.SkuInfoList:
+ OverrideValues = Pcd.SkuOverrideValues
+ if OverrideValues:
+ for Data in OverrideValues.values():
+ Struct = list(Data.values())
+ if Struct:
+ DscOverride = self.ParseStruct(Struct[0])
+ break
+ else:
+ SkuList = sorted(Pcd.SkuInfoList.keys())
+ for Sku in SkuList:
+ SkuInfo = Pcd.SkuInfoList[Sku]
+ if SkuInfo.DefaultStoreDict:
+ DefaultStoreList = sorted(SkuInfo.DefaultStoreDict.keys())
+ for DefaultStore in DefaultStoreList:
+ OverrideValues = Pcd.SkuOverrideValues.get(Sku)
+ if OverrideValues:
+ DscOverride = self.ParseStruct(OverrideValues[DefaultStore])
+ if DscOverride:
+ break
+ if DscOverride:
+ break
+ if DscOverride:
+ DscDefaultValue = True
+ DscMatch = True
+ DecMatch = False
+ else:
+ DecMatch = True
+ else:
+ if Pcd.DscRawValue or (ModuleGuid and ModuleGuid.replace("-","S") in Pcd.PcdValueFromComponents):
+ DscDefaultValue = True
+ DscMatch = True
+ DecMatch = False
+ else:
+ DscDefaultValue = False
+ DecMatch = True
+
+ #
+ # Report PCD item according to their override relationship
+ #
+ if Pcd.DatumType == 'BOOLEAN':
+ if DscDefaultValue:
+ DscDefaultValue = str(int(DscDefaultValue, 0))
+ if DecDefaultValue:
+ DecDefaultValue = str(int(DecDefaultValue, 0))
+ if InfDefaultValue:
+ InfDefaultValue = str(int(InfDefaultValue, 0))
+ if Pcd.DefaultValue:
+ Pcd.DefaultValue = str(int(Pcd.DefaultValue, 0))
+ if DecMatch:
+ self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, ' ')
+ elif InfDefaultValue and InfMatch:
+ self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*M')
+ elif BuildOptionMatch:
+ self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*B')
+ else:
+ if PcdComponentValue:
+ self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, PcdComponentValue, DecMatch, DecDefaultValue, '*M', ModuleGuid)
+ elif DscDefaultValue and DscMatch:
+ if (Pcd.TokenCName, Key, Field) in self.FdfPcdSet:
+ self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*F')
+ else:
+ self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*P')
+
+
+ if ModulePcdSet is None:
+ if IsStructure:
+ continue
+                    if TypeName not in ('PATCH', 'FLAG', 'FIXED'):
+ continue
+ if not BuildOptionMatch:
+ ModuleOverride = self.ModulePcdOverride.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName), {})
+ for ModulePath in ModuleOverride:
+ ModuleDefault = ModuleOverride[ModulePath]
+ if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ if ModuleDefault.startswith('0') and not ModuleDefault.lower().startswith('0x') and \
+ len(ModuleDefault) > 1 and ModuleDefault.lstrip('0'):
+ ModuleDefault = ModuleDefault.lstrip('0')
+ ModulePcdDefaultValueNumber = int(ModuleDefault.strip(), 0)
+ Match = (ModulePcdDefaultValueNumber == PcdValueNumber)
+ if Pcd.DatumType == 'BOOLEAN':
+ ModuleDefault = str(ModulePcdDefaultValueNumber)
+ else:
+ Match = (ModuleDefault.strip() == PcdValue.strip())
+ if Match:
+ continue
+ IsByteArray, ArrayList = ByteArrayForamt(ModuleDefault.strip())
+ if IsByteArray:
+ FileWrite(File, ' *M %-*s = %s' % (self.MaxLen + 15, ModulePath, '{'))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ Value = ModuleDefault.strip()
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ FileWrite(File, ' *M %-*s = %s' % (self.MaxLen + 15, ModulePath, Value))
+
+ if ModulePcdSet is None:
+ FileWrite(File, gSectionEnd)
+ else:
+ if not ReportSubType and ModulePcdSet:
+ FileWrite(File, gSubSectionEnd)
+
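+    # Hedged usage sketch for GenerateReportDetail above (hypothetical names;
+    # FileWrite appends to a list when File is a list, as done later in this
+    # file by BuildReport.GenerateReport):
+    #
+    #   Buffer = []
+    #   PcdReport(Wa).GenerateReportDetail(Buffer, None, ReportSubType=2)
+    #   print(''.join(Buffer))   # dumps the "unused PCDs" section
+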
+ def ParseStruct(self, struct):
+ HasDscOverride = False
+ if struct:
+ for _, Values in list(struct.items()):
+ for Key, value in Values.items():
+ if value[1] and value[1].endswith('.dsc'):
+ HasDscOverride = True
+ break
+                if HasDscOverride:
+ break
+ return HasDscOverride
+
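+    # A minimal sketch of the nested dictionary ParseStruct above walks
+    # (hypothetical field names; value[1] records the file that set the field):
+    #
+    #   {'FieldA': {'0': ('0x1', 'Platform.dsc', '10'),
+    #               '1': ('0x2', 'Package.dec', '12')}}
+    #
+    # Only an entry whose source file ends in '.dsc' counts as a DSC override.
+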
+ def PrintPcdDefault(self, File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue):
+ if not DscMatch and DscDefaultValue is not None:
+ Value = DscDefaultValue.strip()
+ IsByteArray, ArrayList = ByteArrayForamt(Value)
+ if IsByteArray:
+ FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DSC DEFAULT', "{"))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DSC DEFAULT', Value))
+ if not InfMatch and InfDefaultValue is not None:
+ Value = InfDefaultValue.strip()
+ IsByteArray, ArrayList = ByteArrayForamt(Value)
+ if IsByteArray:
+ FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'INF DEFAULT', "{"))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'INF DEFAULT', Value))
+
+ if not DecMatch and DecDefaultValue is not None:
+ Value = DecDefaultValue.strip()
+ IsByteArray, ArrayList = ByteArrayForamt(Value)
+ if IsByteArray:
+ FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DEC DEFAULT', "{"))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DEC DEFAULT', Value))
+ if IsStructure:
+            for fieldvalues in Pcd.DefaultValues.values():
+                self.PrintStructureInfo(File, fieldvalues)
+        if DecMatch and IsStructure:
+            for fieldvalues in Pcd.DefaultValues.values():
+                self.PrintStructureInfo(File, fieldvalues)
+
+    def PrintPcdValue(self, File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, Flag=' ', ModuleGuid=None):
+ if not Pcd.SkuInfoList:
+ Value = Pcd.DefaultValue
+ IsByteArray, ArrayList = ByteArrayForamt(Value)
+ if IsByteArray:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '{'))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
+ Value = Value.lstrip('0')
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', Value))
+ if IsStructure:
+            FieldOverrideFlag = False
+ if (Pcd.TokenCName,Pcd.TokenSpaceGuidCName) in GlobalData.gPcdSkuOverrides:
+ OverrideValues = GlobalData.gPcdSkuOverrides[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)]
+ else:
+ OverrideValues = Pcd.SkuOverrideValues
+ FieldOverrideValues = None
+ if OverrideValues:
+ for Data in OverrideValues.values():
+ Struct = list(Data.values())
+ if Struct:
+ FieldOverrideValues = Struct[0]
+                        FieldOverrideFlag = True
+ break
+ if Pcd.PcdFiledValueFromDscComponent and ModuleGuid and ModuleGuid.replace("-","S") in Pcd.PcdFiledValueFromDscComponent:
+ FieldOverrideValues = Pcd.PcdFiledValueFromDscComponent[ModuleGuid.replace("-","S")]
+ if FieldOverrideValues:
+ OverrideFieldStruct = self.OverrideFieldValue(Pcd, FieldOverrideValues)
+ self.PrintStructureInfo(File, OverrideFieldStruct)
+
+            if not FieldOverrideFlag and (Pcd.PcdFieldValueFromComm or Pcd.PcdFieldValueFromFdf):
+ OverrideFieldStruct = self.OverrideFieldValue(Pcd, {})
+ self.PrintStructureInfo(File, OverrideFieldStruct)
+ self.PrintPcdDefault(File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue)
+ else:
+ FirstPrint = True
+ SkuList = sorted(Pcd.SkuInfoList.keys())
+ for Sku in SkuList:
+ SkuInfo = Pcd.SkuInfoList[Sku]
+ SkuIdName = SkuInfo.SkuIdName
+ if TypeName in ('DYNHII', 'DEXHII'):
+ if SkuInfo.DefaultStoreDict:
+ DefaultStoreList = sorted(SkuInfo.DefaultStoreDict.keys())
+ for DefaultStore in DefaultStoreList:
+ Value = SkuInfo.DefaultStoreDict[DefaultStore]
+ IsByteArray, ArrayList = ByteArrayForamt(Value)
+ if Pcd.DatumType == 'BOOLEAN':
+ Value = str(int(Value, 0))
+ if FirstPrint:
+ FirstPrint = False
+ if IsByteArray:
+ if self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '{'))
+ elif self.DefaultStoreSingle and not self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '{'))
+ elif not self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', '{'))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', '{'))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ if self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', Value))
+ elif self.DefaultStoreSingle and not self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
+ elif not self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', Value))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', Value))
+ else:
+ if IsByteArray:
+ if self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '{'))
+ elif self.DefaultStoreSingle and not self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '{'))
+ elif not self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', '{'))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', '{'))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ if self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', Value))
+ elif self.DefaultStoreSingle and not self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
+ elif not self.DefaultStoreSingle and self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', Value))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', Value))
+ FileWrite(File, '%*s: %s: %s' % (self.MaxLen + 4, SkuInfo.VariableGuid, SkuInfo.VariableName, SkuInfo.VariableOffset))
+ if IsStructure:
+ OverrideValues = Pcd.SkuOverrideValues.get(Sku)
+ if OverrideValues:
+ OverrideFieldStruct = self.OverrideFieldValue(Pcd, OverrideValues[DefaultStore])
+ self.PrintStructureInfo(File, OverrideFieldStruct)
+ self.PrintPcdDefault(File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue)
+ else:
+ Value = SkuInfo.DefaultValue
+ IsByteArray, ArrayList = ByteArrayForamt(Value)
+ if Pcd.DatumType == 'BOOLEAN':
+ Value = str(int(Value, 0))
+ if FirstPrint:
+ FirstPrint = False
+ if IsByteArray:
+ if self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', "{"))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', "{"))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ if self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', Value))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
+ else:
+ if IsByteArray:
+ if self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', "{"))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', "{"))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ if Value.startswith(('0x', '0X')):
+ Value = '{} ({:d})'.format(Value, int(Value, 0))
+ else:
+ Value = "0x{:X} ({})".format(int(Value, 0), Value)
+ if self.SkuSingle:
+ FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', Value))
+ else:
+ FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
+ if TypeName in ('DYNVPD', 'DEXVPD'):
+ FileWrite(File, '%*s' % (self.MaxLen + 4, SkuInfo.VpdOffset))
+ VPDPcdItem = (Pcd.TokenSpaceGuidCName + '.' + PcdTokenCName, SkuIdName, SkuInfo.VpdOffset, Pcd.MaxDatumSize, SkuInfo.DefaultValue)
+                            if VPDPcdItem not in VPDPcdList:
+                                PcdGuidList = self.UnusedPcds.get(Pcd.TokenSpaceGuidCName)
+                                if PcdGuidList:
+                                    PcdList = PcdGuidList.get(Pcd.Type)
+                                    if not PcdList:
+                                        VPDPcdList.append(VPDPcdItem)
+                                    else:
+                                        # Guard the iteration: PcdList may be None or empty,
+                                        # in which case the item was already appended above.
+                                        for VpdPcd in PcdList:
+                                            if PcdTokenCName == VpdPcd.TokenCName:
+                                                break
+                                        else:
+                                            VPDPcdList.append(VPDPcdItem)
+ if IsStructure:
+                            FieldOverrideFlag = False
+ OverrideValues = Pcd.SkuOverrideValues.get(Sku)
+ if OverrideValues:
+ Keys = list(OverrideValues.keys())
+ OverrideFieldStruct = self.OverrideFieldValue(Pcd, OverrideValues[Keys[0]])
+ self.PrintStructureInfo(File, OverrideFieldStruct)
+                                FieldOverrideFlag = True
+                            if not FieldOverrideFlag and (Pcd.PcdFieldValueFromComm or Pcd.PcdFieldValueFromFdf):
+ OverrideFieldStruct = self.OverrideFieldValue(Pcd, {})
+ self.PrintStructureInfo(File, OverrideFieldStruct)
+ self.PrintPcdDefault(File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue)
+
+ def OverrideFieldValue(self, Pcd, OverrideStruct):
+ OverrideFieldStruct = collections.OrderedDict()
+ if OverrideStruct:
+ for _, Values in OverrideStruct.items():
+ for Key,value in Values.items():
+ if value[1] and value[1].endswith('.dsc'):
+ OverrideFieldStruct[Key] = value
+ if Pcd.PcdFieldValueFromFdf:
+ for Key, Values in Pcd.PcdFieldValueFromFdf.items():
+ if Key in OverrideFieldStruct and Values[0] == OverrideFieldStruct[Key][0]:
+ continue
+ OverrideFieldStruct[Key] = Values
+ if Pcd.PcdFieldValueFromComm:
+ for Key, Values in Pcd.PcdFieldValueFromComm.items():
+ if Key in OverrideFieldStruct and Values[0] == OverrideFieldStruct[Key][0]:
+ continue
+ OverrideFieldStruct[Key] = Values
+ return OverrideFieldStruct
+
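+    # Note on OverrideFieldValue above: DSC-sourced field values are collected
+    # first, then FDF and build-command field overrides are merged in, each
+    # skipping entries whose value already matches what was recorded.
+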
+ def PrintStructureInfo(self, File, Struct):
+ for Key, Value in sorted(Struct.items(), key=lambda x: x[0]):
+ if Value[1] and 'build command options' in Value[1]:
+ FileWrite(File, ' *B %-*s = %s' % (self.MaxLen + 4, '.' + Key, Value[0]))
+ elif Value[1] and Value[1].endswith('.fdf'):
+ FileWrite(File, ' *F %-*s = %s' % (self.MaxLen + 4, '.' + Key, Value[0]))
+ else:
+ FileWrite(File, ' %-*s = %s' % (self.MaxLen + 4, '.' + Key, Value[0]))
+
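+    # Sample PrintStructureInfo output lines (illustrative field names/values):
+    #    *B  .FieldFromBuildCmd = 0x2
+    #    *F  .FieldFromFdf      = 0x1
+    #        .FieldFromDsc      = 0x3
+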
+ def StrtoHex(self, value):
+ try:
+ value = hex(int(value))
+ return value
+ except:
+ if value.startswith("L\"") and value.endswith("\""):
+ valuelist = []
+ for ch in value[2:-1]:
+ valuelist.append(hex(ord(ch)))
+ valuelist.append('0x00')
+ return valuelist
+ elif value.startswith("\"") and value.endswith("\""):
+ return hex(ord(value[1:-1]))
+ elif value.startswith("{") and value.endswith("}"):
+ valuelist = []
+ if ',' not in value:
+ return value[1:-1]
+ for ch in value[1:-1].split(','):
+ ch = ch.strip()
+ if ch.startswith('0x') or ch.startswith('0X'):
+ valuelist.append(ch)
+ continue
+ try:
+ valuelist.append(hex(int(ch.strip())))
+ except:
+ pass
+ return valuelist
+ else:
+ return value
+
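+    # Hedged examples of StrtoHex conversions (illustrative inputs):
+    #
+    #   StrtoHex('16')        -> '0x10'
+    #   StrtoHex('"A"')       -> '0x41'
+    #   StrtoHex('L"Hi"')     -> ['0x48', '0x69', '0x00']
+    #   StrtoHex('{0x1, 2}')  -> ['0x1', '0x2']
+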
+ def IsStructurePcd(self, PcdToken, PcdTokenSpaceGuid):
+ if GlobalData.gStructurePcd and (self.Arch in GlobalData.gStructurePcd) and ((PcdToken, PcdTokenSpaceGuid) in GlobalData.gStructurePcd[self.Arch]):
+ return True
+ else:
+ return False
+
+##
+# Reports platform and module Prediction information
+#
+# This class reports the platform execution order prediction section and
+# module load fixed address prediction subsection in the build report file.
+#
+class PredictionReport(object):
+ ##
+ # Constructor function for class PredictionReport
+ #
+    # This constructor function generates a PredictionReport object for the platform.
+ #
+ # @param self: The object pointer
+ # @param Wa Workspace context information
+ #
+ def __init__(self, Wa):
+ self._MapFileName = os.path.join(Wa.BuildDir, Wa.Name + ".map")
+ self._MapFileParsed = False
+ self._EotToolInvoked = False
+ self._FvDir = Wa.FvDir
+ self._EotDir = Wa.BuildDir
+ self._FfsEntryPoint = {}
+ self._GuidMap = {}
+ self._SourceList = []
+ self.FixedMapDict = {}
+ self.ItemList = []
+ self.MaxLen = 0
+
+ #
+ # Collect all platform reference source files and GUID C Name
+ #
+ for Pa in Wa.AutoGenObjectList:
+ for Module in Pa.LibraryAutoGenList + Pa.ModuleAutoGenList:
+ #
+ # BASE typed modules are EFI agnostic, so we need not scan
+ # their source code to find PPI/Protocol produce or consume
+ # information.
+ #
+ if Module.ModuleType == SUP_MODULE_BASE:
+ continue
+ #
+ # Add module referenced source files
+ #
+ self._SourceList.append(str(Module))
+ IncludeList = {}
+ for Source in Module.SourceFileList:
+ if os.path.splitext(str(Source))[1].lower() == ".c":
+ self._SourceList.append(" " + str(Source))
+ FindIncludeFiles(Source.Path, Module.IncludePathList, IncludeList)
+ for IncludeFile in IncludeList.values():
+ self._SourceList.append(" " + IncludeFile)
+
+ for Guid in Module.PpiList:
+ self._GuidMap[Guid] = GuidStructureStringToGuidString(Module.PpiList[Guid])
+ for Guid in Module.ProtocolList:
+ self._GuidMap[Guid] = GuidStructureStringToGuidString(Module.ProtocolList[Guid])
+ for Guid in Module.GuidList:
+ self._GuidMap[Guid] = GuidStructureStringToGuidString(Module.GuidList[Guid])
+
+ if Module.Guid and not Module.IsLibrary:
+ EntryPoint = " ".join(Module.Module.ModuleEntryPointList)
+
+ RealEntryPoint = "_ModuleEntryPoint"
+
+ self._FfsEntryPoint[Module.Guid.upper()] = (EntryPoint, RealEntryPoint)
+
+
+ #
+ # Collect platform firmware volume list as the input of EOT.
+ #
+ self._FvList = []
+ if Wa.FdfProfile:
+ for Fd in Wa.FdfProfile.FdDict:
+ for FdRegion in Wa.FdfProfile.FdDict[Fd].RegionList:
+ if FdRegion.RegionType != BINARY_FILE_TYPE_FV:
+ continue
+ for FvName in FdRegion.RegionDataList:
+ if FvName in self._FvList:
+ continue
+ self._FvList.append(FvName)
+ for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
+ for Section in Ffs.SectionList:
+ try:
+ for FvSection in Section.SectionList:
+ if FvSection.FvName in self._FvList:
+ continue
+ self._FvList.append(FvSection.FvName)
+ except AttributeError:
+ pass
+
+
+ ##
+ # Parse platform fixed address map files
+ #
+    # This function parses the platform's final fixed address map file to build
+    # the database of predicted fixed addresses for module image bases, entry
+    # points, etc.
+ #
+ # @param self: The object pointer
+ #
+ def _ParseMapFile(self):
+ if self._MapFileParsed:
+ return
+ self._MapFileParsed = True
+ if os.path.isfile(self._MapFileName):
+ try:
+ FileContents = open(self._MapFileName).read()
+ for Match in gMapFileItemPattern.finditer(FileContents):
+ AddressType = Match.group(1)
+ BaseAddress = Match.group(2)
+ EntryPoint = Match.group(3)
+ Guid = Match.group(4).upper()
+ List = self.FixedMapDict.setdefault(Guid, [])
+ List.append((AddressType, BaseAddress, "*I"))
+ List.append((AddressType, EntryPoint, "*E"))
+ except:
+ EdkLogger.warn(None, "Cannot open file to read", self._MapFileName)
+
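+    # After _ParseMapFile above, self.FixedMapDict conceptually maps a module
+    # GUID to its predicted addresses (hypothetical values):
+    #
+    #   {'1A2B3C4D-...': [('Memory', '0xFFF0000', '*I'),
+    #                     ('Memory', '0xFFF0120', '*E')]}
+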
+ ##
+    # Invokes the EOT tool to get the predicted execution order.
+    #
+    # This function invokes the EOT tool to calculate the predicted dispatch order.
+ #
+ # @param self: The object pointer
+ #
+ def _InvokeEotTool(self):
+ if self._EotToolInvoked:
+ return
+
+ self._EotToolInvoked = True
+ FvFileList = []
+ for FvName in self._FvList:
+ FvFile = os.path.join(self._FvDir, FvName + ".Fv")
+ if os.path.isfile(FvFile):
+ FvFileList.append(FvFile)
+
+ if len(FvFileList) == 0:
+ return
+ #
+ # Write source file list and GUID file list to an intermediate file
+ # as the input for EOT tool and dispatch List as the output file
+ # from EOT tool.
+ #
+ SourceList = os.path.join(self._EotDir, "SourceFile.txt")
+ GuidList = os.path.join(self._EotDir, "GuidList.txt")
+ DispatchList = os.path.join(self._EotDir, "Dispatch.txt")
+
+ TempFile = []
+ for Item in self._SourceList:
+ FileWrite(TempFile, Item)
+ SaveFileOnChange(SourceList, "".join(TempFile), False)
+ TempFile = []
+ for Key in self._GuidMap:
+ FileWrite(TempFile, "%s %s" % (Key, self._GuidMap[Key]))
+ SaveFileOnChange(GuidList, "".join(TempFile), False)
+
+ try:
+ from Eot.EotMain import Eot
+
+ #
+ # Invoke EOT tool and echo its runtime performance
+ #
+ EotStartTime = time.time()
+ Eot(CommandLineOption=False, SourceFileList=SourceList, GuidList=GuidList,
+ FvFileList=' '.join(FvFileList), Dispatch=DispatchList, IsInit=True)
+ EotEndTime = time.time()
+ EotDuration = time.strftime("%H:%M:%S", time.gmtime(int(round(EotEndTime - EotStartTime))))
+ EdkLogger.quiet("EOT run time: %s\n" % EotDuration)
+
+ #
+ # Parse the output of EOT tool
+ #
+ for Line in open(DispatchList):
+ if len(Line.split()) < 4:
+ continue
+ (Guid, Phase, FfsName, FilePath) = Line.split()
+ Symbol = self._FfsEntryPoint.get(Guid, [FfsName, ""])[0]
+ if len(Symbol) > self.MaxLen:
+ self.MaxLen = len(Symbol)
+ self.ItemList.append((Phase, Symbol, FilePath))
+ except:
+ EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))
+            EdkLogger.warn(None, "Failed to generate the execution order prediction report because an error occurred while executing EOT.")
+
+
+ ##
+ # Generate platform execution order report
+ #
+ # This function generates the predicted module execution order.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ #
+ def _GenerateExecutionOrderReport(self, File):
+ self._InvokeEotTool()
+ if len(self.ItemList) == 0:
+ return
+ FileWrite(File, gSectionStart)
+ FileWrite(File, "Execution Order Prediction")
+ FileWrite(File, "*P PEI phase")
+ FileWrite(File, "*D DXE phase")
+ FileWrite(File, "*E Module INF entry point name")
+ FileWrite(File, "*N Module notification function name")
+
+ FileWrite(File, "Type %-*s %s" % (self.MaxLen, "Symbol", "Module INF Path"))
+ FileWrite(File, gSectionSep)
+ for Item in self.ItemList:
+ FileWrite(File, "*%sE %-*s %s" % (Item[0], self.MaxLen, Item[1], Item[2]))
+
+ FileWrite(File, gSectionStart)
+
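+    # A line emitted by _GenerateExecutionOrderReport above looks like
+    # (illustrative symbol and path):
+    #   *PE PeiMain         MdeModulePkg/Core/Pei/PeiMain.inf
+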
+ ##
+ # Generate Fixed Address report.
+ #
+    # This function generates the predicted fixed address report for a module
+ # specified by Guid.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param Guid The module Guid value.
+ # @param NotifyList The list of all notify function in a module
+ #
+ def _GenerateFixedAddressReport(self, File, Guid, NotifyList):
+ self._ParseMapFile()
+ FixedAddressList = self.FixedMapDict.get(Guid)
+ if not FixedAddressList:
+ return
+
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, "Fixed Address Prediction")
+ FileWrite(File, "*I Image Loading Address")
+ FileWrite(File, "*E Entry Point Address")
+ FileWrite(File, "*N Notification Function Address")
+ FileWrite(File, "*F Flash Address")
+ FileWrite(File, "*M Memory Address")
+ FileWrite(File, "*S SMM RAM Offset")
+ FileWrite(File, "TOM Top of Memory")
+
+ FileWrite(File, "Type Address Name")
+ FileWrite(File, gSubSectionSep)
+ for Item in FixedAddressList:
+ Type = Item[0]
+ Value = Item[1]
+ Symbol = Item[2]
+ if Symbol == "*I":
+ Name = "(Image Base)"
+ elif Symbol == "*E":
+ Name = self._FfsEntryPoint.get(Guid, ["", "_ModuleEntryPoint"])[1]
+ elif Symbol in NotifyList:
+ Name = Symbol
+ Symbol = "*N"
+ else:
+ continue
+
+ if "Flash" in Type:
+ Symbol += "F"
+ elif "Memory" in Type:
+ Symbol += "M"
+ else:
+ Symbol += "S"
+
+ if Value[0] == "-":
+ Value = "TOM" + Value
+
+ FileWrite(File, "%s %-16s %s" % (Symbol, Value, Name))
+
+ ##
+ # Generate report for the prediction part
+ #
+    # This function generates the predicted fixed address report for a module or
+    # the predicted module execution order for a platform.
+    # If the input Guid is None, it generates the predicted module execution order;
+    # otherwise, it generates the fixed loading address report for the module
+    # specified by Guid.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param Guid The module Guid value.
+ #
+ def GenerateReport(self, File, Guid):
+ if Guid:
+ self._GenerateFixedAddressReport(File, Guid.upper(), [])
+ else:
+ self._GenerateExecutionOrderReport(File)
+
+##
+# Reports FD region information
+#
+# This class reports the FD subsection in the build report file.
+# It collects region information of platform flash device.
+# If the region is a firmware volume, it lists the set of modules
+# and their space information; otherwise, it only lists the region name,
+# base address and size in its sub-section header.
+# If there are nested FVs, they are listed immediately after
+# this FD region subsection.
+#
+class FdRegionReport(object):
+ ##
+ # Discover all the nested FV name list.
+ #
+    # This is an internal worker function that discovers all the nested FV information
+    # in the parent firmware volume. It uses a recursive depth-first search to
+    # find all the nested FV names and append them to the list.
+ #
+ # @param self The object pointer
+ # @param FvName The name of current firmware file system
+ # @param Wa Workspace context information
+ #
+ def _DiscoverNestedFvList(self, FvName, Wa):
+ FvDictKey=FvName.upper()
+ if FvDictKey in Wa.FdfProfile.FvDict:
+ for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
+ for Section in Ffs.SectionList:
+ try:
+ for FvSection in Section.SectionList:
+ if FvSection.FvName in self.FvList:
+ continue
+ self._GuidsDb[Ffs.NameGuid.upper()] = FvSection.FvName
+ self.FvList.append(FvSection.FvName)
+ self.FvInfo[FvSection.FvName] = ("Nested FV", 0, 0)
+ self._DiscoverNestedFvList(FvSection.FvName, Wa)
+ except AttributeError:
+ pass
+
+ ##
+ # Constructor function for class FdRegionReport
+ #
+    # This constructor function generates an FdRegionReport object for a specified FdRegion.
+    # If the FdRegion is a firmware volume, it will recursively find all its nested firmware
+    # volumes. This function also collects the GUID map in order to dump module identification
+    # in the final report.
+ #
+ # @param self: The object pointer
+ # @param FdRegion The current FdRegion object
+ # @param Wa Workspace context information
+ #
+ def __init__(self, FdRegion, Wa):
+ self.Type = FdRegion.RegionType
+ self.BaseAddress = FdRegion.Offset
+ self.Size = FdRegion.Size
+ self.FvList = []
+ self.FvInfo = {}
+ self._GuidsDb = {}
+ self._FvDir = Wa.FvDir
+ self._WorkspaceDir = Wa.WorkspaceDir
+
+ #
+ # If the input FdRegion is not a firmware volume,
+ # we are done.
+ #
+ if self.Type != BINARY_FILE_TYPE_FV:
+ return
+
+ #
+ # Find all nested FVs in the FdRegion
+ #
+ for FvName in FdRegion.RegionDataList:
+ if FvName in self.FvList:
+ continue
+ self.FvList.append(FvName)
+ self.FvInfo[FvName] = ("Fd Region", self.BaseAddress, self.Size)
+ self._DiscoverNestedFvList(FvName, Wa)
+
+ PlatformPcds = {}
+ #
+ # Collect PCDs declared in DEC files.
+ #
+ for Pa in Wa.AutoGenObjectList:
+ for Package in Pa.PackageList:
+ for (TokenCName, TokenSpaceGuidCName, DecType) in Package.Pcds:
+ DecDefaultValue = Package.Pcds[TokenCName, TokenSpaceGuidCName, DecType].DefaultValue
+ PlatformPcds[(TokenCName, TokenSpaceGuidCName)] = DecDefaultValue
+ #
+ # Collect PCDs defined in DSC file
+ #
+ for Pa in Wa.AutoGenObjectList:
+ for (TokenCName, TokenSpaceGuidCName) in Pa.Platform.Pcds:
+ DscDefaultValue = Pa.Platform.Pcds[(TokenCName, TokenSpaceGuidCName)].DefaultValue
+ PlatformPcds[(TokenCName, TokenSpaceGuidCName)] = DscDefaultValue
+
+ #
+ # Add PEI and DXE a priori files GUIDs defined in PI specification.
+ #
+ self._GuidsDb[PEI_APRIORI_GUID] = "PEI Apriori"
+ self._GuidsDb[DXE_APRIORI_GUID] = "DXE Apriori"
+ #
+ # Add ACPI table storage file
+ #
+ self._GuidsDb["7E374E25-8E01-4FEE-87F2-390C23C606CD"] = "ACPI table storage"
+
+ for Pa in Wa.AutoGenObjectList:
+ for ModuleKey in Pa.Platform.Modules:
+ M = Pa.Platform.Modules[ModuleKey].M
+ InfPath = mws.join(Wa.WorkspaceDir, M.MetaFile.File)
+ self._GuidsDb[M.Guid.upper()] = "%s (%s)" % (M.Module.BaseName, InfPath)
+
+ #
+ # Collect the GUID map in the FV firmware volume
+ #
+ for FvName in self.FvList:
+ FvDictKey=FvName.upper()
+ if FvDictKey in Wa.FdfProfile.FvDict:
+ for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
+ try:
+ #
+ # collect GUID map for binary EFI file in FDF file.
+ #
+ Guid = Ffs.NameGuid.upper()
+ Match = gPcdGuidPattern.match(Ffs.NameGuid)
+ if Match:
+ PcdTokenspace = Match.group(1)
+ PcdToken = Match.group(2)
+ if (PcdToken, PcdTokenspace) in PlatformPcds:
+ GuidValue = PlatformPcds[(PcdToken, PcdTokenspace)]
+ Guid = GuidStructureByteArrayToGuidString(GuidValue).upper()
+ for Section in Ffs.SectionList:
+ try:
+ ModuleSectFile = mws.join(Wa.WorkspaceDir, Section.SectFileName)
+ self._GuidsDb[Guid] = ModuleSectFile
+ except AttributeError:
+ pass
+ except AttributeError:
+ pass
+
+
+ ##
+ # Internal worker function to generate report for the FD region
+ #
+    # This internal worker function generates the report for the FD region.
+    # If the type is firmware volume, it lists the offset and module identification.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param Title The title for the FD subsection
+ # @param BaseAddress The base address for the FD region
+ # @param Size The size of the FD region
+ # @param FvName The FV name if the FD region is a firmware volume
+ #
+ def _GenerateReport(self, File, Title, Type, BaseAddress, Size=0, FvName=None):
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, Title)
+ FileWrite(File, "Type: %s" % Type)
+ FileWrite(File, "Base Address: 0x%X" % BaseAddress)
+
+ if self.Type == BINARY_FILE_TYPE_FV:
+ FvTotalSize = 0
+ FvTakenSize = 0
+ FvFreeSize = 0
+ if FvName.upper().endswith('.FV'):
+ FileExt = FvName + ".txt"
+ else:
+ FileExt = FvName + ".Fv.txt"
+
+            # Ensure FvReportFileName is always bound, even when FileExt
+            # already resolves relative to the current working directory.
+            FvReportFileName = FileExt
+            if not os.path.isfile(FvReportFileName):
+                FvReportFileName = mws.join(self._WorkspaceDir, FileExt)
+                if not os.path.isfile(FvReportFileName):
+                    FvReportFileName = os.path.join(self._FvDir, FileExt)
+ try:
+ #
+ # Collect size info in the firmware volume.
+ #
+ FvReport = open(FvReportFileName).read()
+ Match = gFvTotalSizePattern.search(FvReport)
+ if Match:
+ FvTotalSize = int(Match.group(1), 16)
+ Match = gFvTakenSizePattern.search(FvReport)
+ if Match:
+ FvTakenSize = int(Match.group(1), 16)
+ FvFreeSize = FvTotalSize - FvTakenSize
+ #
+ # Write size information to the report file.
+ #
+ FileWrite(File, "Size: 0x%X (%.0fK)" % (FvTotalSize, FvTotalSize / 1024.0))
+ FileWrite(File, "Fv Name: %s (%.1f%% Full)" % (FvName, FvTakenSize * 100.0 / FvTotalSize))
+ FileWrite(File, "Occupied Size: 0x%X (%.0fK)" % (FvTakenSize, FvTakenSize / 1024.0))
+ FileWrite(File, "Free Size: 0x%X (%.0fK)" % (FvFreeSize, FvFreeSize / 1024.0))
+ FileWrite(File, "Offset Module")
+ FileWrite(File, gSubSectionSep)
+ #
+ # Write module offset and module identification to the report file.
+ #
+ OffsetInfo = {}
+ for Match in gOffsetGuidPattern.finditer(FvReport):
+ Guid = Match.group(2).upper()
+ OffsetInfo[Match.group(1)] = self._GuidsDb.get(Guid, Guid)
+ OffsetList = sorted(OffsetInfo.keys())
+ for Offset in OffsetList:
+ FileWrite (File, "%s %s" % (Offset, OffsetInfo[Offset]))
+ except IOError:
+ EdkLogger.warn(None, "Fail to read report file", FvReportFileName)
+ else:
+ FileWrite(File, "Size: 0x%X (%.0fK)" % (Size, Size / 1024.0))
+ FileWrite(File, gSubSectionEnd)
+
+ ##
+ # Generate report for the FD region
+ #
+ # This function generates report for the FD region.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ #
+ def GenerateReport(self, File):
+ if (len(self.FvList) > 0):
+ for FvItem in self.FvList:
+ Info = self.FvInfo[FvItem]
+ self._GenerateReport(File, Info[0], TAB_FV_DIRECTORY, Info[1], Info[2], FvItem)
+ else:
+ self._GenerateReport(File, "FD Region", self.Type, self.BaseAddress, self.Size)
+
+##
+# Reports FD information
+#
+# This class reports the FD section in the build report file.
+# It collects flash device information for a platform.
+#
+class FdReport(object):
+ ##
+ # Constructor function for class FdReport
+ #
+    # This constructor function generates an FdReport object for a specified
+ # firmware device.
+ #
+ # @param self The object pointer
+ # @param Fd The current Firmware device object
+ # @param Wa Workspace context information
+ #
+ def __init__(self, Fd, Wa):
+ self.FdName = Fd.FdUiName
+ self.BaseAddress = Fd.BaseAddress
+ self.Size = Fd.Size
+ self.FdRegionList = [FdRegionReport(FdRegion, Wa) for FdRegion in Fd.RegionList]
+ self.FvPath = os.path.join(Wa.BuildDir, TAB_FV_DIRECTORY)
+ self.VPDBaseAddress = 0
+ self.VPDSize = 0
+ for index, FdRegion in enumerate(Fd.RegionList):
+ if str(FdRegion.RegionType) == 'FILE' and Wa.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
+ self.VPDBaseAddress = self.FdRegionList[index].BaseAddress
+ self.VPDSize = self.FdRegionList[index].Size
+ break
+
+ ##
+ # Generate report for the firmware device.
+ #
+ # This function generates report for the firmware device.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ #
+ def GenerateReport(self, File):
+ FileWrite(File, gSectionStart)
+ FileWrite(File, "Firmware Device (FD)")
+ FileWrite(File, "FD Name: %s" % self.FdName)
+ FileWrite(File, "Base Address: %s" % self.BaseAddress)
+ FileWrite(File, "Size: 0x%X (%.0fK)" % (self.Size, self.Size / 1024.0))
+ if len(self.FdRegionList) > 0:
+ FileWrite(File, gSectionSep)
+ for FdRegionItem in self.FdRegionList:
+ FdRegionItem.GenerateReport(File)
+
+ if VPDPcdList:
+ VPDPcdList.sort(key=lambda x: int(x[2], 0))
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, "FD VPD Region")
+ FileWrite(File, "Base Address: 0x%X" % self.VPDBaseAddress)
+ FileWrite(File, "Size: 0x%X (%.0fK)" % (self.VPDSize, self.VPDSize / 1024.0))
+ FileWrite(File, gSubSectionSep)
+ for item in VPDPcdList:
+ # Add BaseAddress for offset
+ Offset = '0x%08X' % (int(item[2], 16) + self.VPDBaseAddress)
+ IsByteArray, ArrayList = ByteArrayForamt(item[-1])
+ Skuinfo = item[1]
+ if len(GlobalData.gSkuids) == 1 :
+ Skuinfo = GlobalData.gSkuids[0]
+ if IsByteArray:
+ FileWrite(File, "%s | %s | %s | %s | %s" % (item[0], Skuinfo, Offset, item[3], '{'))
+ for Array in ArrayList:
+ FileWrite(File, Array)
+ else:
+ FileWrite(File, "%s | %s | %s | %s | %s" % (item[0], Skuinfo, Offset, item[3], item[-1]))
+ FileWrite(File, gSubSectionEnd)
+ FileWrite(File, gSectionEnd)
+
+
+
+##
+# Reports platform information
+#
+# This class reports the whole platform information
+#
+class PlatformReport(object):
+ ##
+ # Constructor function for class PlatformReport
+ #
+    # This constructor function generates a PlatformReport object for a platform build.
+ # It generates report for platform summary, flash, global PCDs and detailed
+ # module information for modules involved in platform build.
+ #
+ # @param self The object pointer
+ # @param Wa Workspace context information
+ # @param MaList The list of modules in the platform build
+ #
+ def __init__(self, Wa, MaList, ReportType):
+ self._WorkspaceDir = Wa.WorkspaceDir
+ self.PlatformName = Wa.Name
+ self.PlatformDscPath = Wa.Platform
+ self.Architectures = " ".join(Wa.ArchList)
+ self.ToolChain = Wa.ToolChain
+ self.Target = Wa.BuildTarget
+ self.OutputPath = os.path.join(Wa.WorkspaceDir, Wa.OutputDir)
+ self.BuildEnvironment = platform.platform()
+
+ self.PcdReport = None
+ if "PCD" in ReportType:
+ self.PcdReport = PcdReport(Wa)
+
+ self.FdReportList = []
+ if "FLASH" in ReportType and Wa.FdfProfile and MaList is None:
+ for Fd in Wa.FdfProfile.FdDict:
+ self.FdReportList.append(FdReport(Wa.FdfProfile.FdDict[Fd], Wa))
+
+ self.PredictionReport = None
+ if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:
+ self.PredictionReport = PredictionReport(Wa)
+
+ self.DepexParser = None
+ if "DEPEX" in ReportType:
+ self.DepexParser = DepexParser(Wa)
+
+ self.ModuleReportList = []
+ if MaList is not None:
+ self._IsModuleBuild = True
+ for Ma in MaList:
+ self.ModuleReportList.append(ModuleReport(Ma, ReportType))
+ else:
+ self._IsModuleBuild = False
+ for Pa in Wa.AutoGenObjectList:
+ ModuleAutoGenList = []
+ for ModuleKey in Pa.Platform.Modules:
+ ModuleAutoGenList.append(Pa.Platform.Modules[ModuleKey].M)
+ if GlobalData.gFdfParser is not None:
+ if Pa.Arch in GlobalData.gFdfParser.Profile.InfDict:
+ INFList = GlobalData.gFdfParser.Profile.InfDict[Pa.Arch]
+ for InfName in INFList:
+ InfClass = PathClass(NormPath(InfName), Wa.WorkspaceDir, Pa.Arch)
+ Ma = ModuleAutoGen(Wa, InfClass, Pa.BuildTarget, Pa.ToolChain, Pa.Arch, Wa.MetaFile, Pa.DataPipe)
+ if Ma is None:
+ continue
+ if Ma not in ModuleAutoGenList:
+ ModuleAutoGenList.append(Ma)
+ for MGen in ModuleAutoGenList:
+ self.ModuleReportList.append(ModuleReport(MGen, ReportType))
+
+
+
+ ##
+ # Generate report for the whole platform.
+ #
+ # This function generates report for platform information.
+ # It comprises of platform summary, global PCD, flash and
+ # module list sections.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param BuildDuration The total time to build the modules
+ # @param AutoGenTime The total time of AutoGen Phase
+ # @param MakeTime The total time of Make Phase
+ # @param GenFdsTime The total time of GenFds Phase
+ # @param ReportType The kind of report items in the final report file
+ #
+ def GenerateReport(self, File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, ReportType):
+ FileWrite(File, "Platform Summary")
+ FileWrite(File, "Platform Name: %s" % self.PlatformName)
+ FileWrite(File, "Platform DSC Path: %s" % self.PlatformDscPath)
+ FileWrite(File, "Architectures: %s" % self.Architectures)
+ FileWrite(File, "Tool Chain: %s" % self.ToolChain)
+ FileWrite(File, "Target: %s" % self.Target)
+ if GlobalData.gSkuids:
+ FileWrite(File, "SKUID: %s" % " ".join(GlobalData.gSkuids))
+ if GlobalData.gDefaultStores:
+ FileWrite(File, "DefaultStore: %s" % " ".join(GlobalData.gDefaultStores))
+ FileWrite(File, "Output Path: %s" % self.OutputPath)
+ FileWrite(File, "Build Environment: %s" % self.BuildEnvironment)
+ FileWrite(File, "Build Duration: %s" % BuildDuration)
+ if AutoGenTime:
+ FileWrite(File, "AutoGen Duration: %s" % AutoGenTime)
+ if MakeTime:
+ FileWrite(File, "Make Duration: %s" % MakeTime)
+ if GenFdsTime:
+ FileWrite(File, "GenFds Duration: %s" % GenFdsTime)
+ FileWrite(File, "Report Content: %s" % ", ".join(ReportType))
+
+ if GlobalData.MixedPcd:
+ FileWrite(File, gSectionStart)
+ FileWrite(File, "The following PCDs use different access methods:")
+ FileWrite(File, gSectionSep)
+ for PcdItem in GlobalData.MixedPcd:
+ FileWrite(File, "%s.%s" % (str(PcdItem[1]), str(PcdItem[0])))
+ FileWrite(File, gSectionEnd)
+
+ if not self._IsModuleBuild:
+ if "PCD" in ReportType:
+ self.PcdReport.GenerateReport(File, None)
+
+ if "FLASH" in ReportType:
+ for FdReportListItem in self.FdReportList:
+ FdReportListItem.GenerateReport(File)
+
+ for ModuleReportItem in self.ModuleReportList:
+ ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, self.DepexParser, ReportType)
+
+ if not self._IsModuleBuild:
+ if "EXECUTION_ORDER" in ReportType:
+ self.PredictionReport.GenerateReport(File, None)
+
+## BuildReport class
+#
+# This base class contains the routines to collect data and then
+# apply a certain format to the output report.
+#
+class BuildReport(object):
+ ##
+ # Constructor function for class BuildReport
+ #
+    # This constructor function generates a BuildReport object for a platform build.
+ # It generates report for platform summary, flash, global PCDs and detailed
+ # module information for modules involved in platform build.
+ #
+ # @param self The object pointer
+ # @param ReportFile The file name to save report file
+ # @param ReportType The kind of report items in the final report file
+ #
+ def __init__(self, ReportFile, ReportType):
+ self.ReportFile = ReportFile
+ if ReportFile:
+ self.ReportList = []
+ self.ReportType = []
+ if ReportType:
+ for ReportTypeItem in ReportType:
+ if ReportTypeItem not in self.ReportType:
+ self.ReportType.append(ReportTypeItem)
+ else:
+ self.ReportType = ["PCD", "LIBRARY", "BUILD_FLAGS", "DEPEX", "HASH", "FLASH", "FIXED_ADDRESS"]
+ ##
+ # Adds platform report to the list
+ #
+ # This function adds a platform report to the final report list.
+ #
+ # @param self The object pointer
+ # @param Wa Workspace context information
+ # @param MaList The list of modules in the platform build
+ #
+ def AddPlatformReport(self, Wa, MaList=None):
+ if self.ReportFile:
+ self.ReportList.append((Wa, MaList))
+
+ ##
+ # Generates the final report.
+ #
+ # This function generates platform build report. It invokes GenerateReport()
+ # method for every platform report in the list.
+ #
+ # @param self The object pointer
+ # @param BuildDuration The total time to build the modules
+ # @param AutoGenTime The total time of AutoGen phase
+ # @param MakeTime The total time of Make phase
+ # @param GenFdsTime The total time of GenFds phase
+ #
+ def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime):
+ if self.ReportFile:
+ try:
+ File = []
+ for (Wa, MaList) in self.ReportList:
+ PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType)
+ Content = FileLinesSplit(''.join(File), gLineMaxLength)
+ SaveFileOnChange(self.ReportFile, Content, False)
+ EdkLogger.quiet("Build report can be found at %s" % os.path.abspath(self.ReportFile))
+ except IOError:
+ EdkLogger.error(None, FILE_WRITE_FAILURE, ExtraData=self.ReportFile)
+ except:
+ EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report", ExtraData=self.ReportFile, RaiseError=False)
+ EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))
+
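+# Hedged usage sketch (Wa and the timing strings are hypothetical, produced
+# elsewhere by the build driver):
+#
+#   Report = BuildReport('Report.txt', ['PCD', 'FLASH'])
+#   Report.AddPlatformReport(Wa)
+#   Report.GenerateReport(BuildDuration, AutoGenTime, MakeTime, GenFdsTime)
+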
+# This acts like the main() function for the script, unless it is 'import'ed into another script.
+if __name__ == '__main__':
+ pass
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/__init__.py
new file mode 100644
index 00000000..76fba8f0
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/__init__.py
@@ -0,0 +1,9 @@
+## @file
+# Python 'build' package initialization file.
+#
+# This file is required to make the Python interpreter treat the directory
+# as containing a package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/build.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/build.py
new file mode 100755
index 00000000..3790fbef
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/build.py
@@ -0,0 +1,2796 @@
+## @file
+# build a platform or a module
+#
+# Copyright (c) 2014, Hewlett-Packard Development Company, L.P.<BR>
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import os.path as path
+import sys
+import os
+import re
+import glob
+import time
+import platform
+import traceback
+import multiprocessing
+from threading import Thread, Event, BoundedSemaphore
+import threading
+from linecache import getlines
+from subprocess import Popen, PIPE, STDOUT
+from collections import OrderedDict, defaultdict
+
+from AutoGen.PlatformAutoGen import PlatformAutoGen
+from AutoGen.ModuleAutoGen import ModuleAutoGen
+from AutoGen.WorkspaceAutoGen import WorkspaceAutoGen
+from AutoGen.AutoGenWorker import AutoGenWorkerInProcess,AutoGenManager,\
+ LogAgent
+from AutoGen import GenMake
+from Common import Misc as Utils
+
+from Common.TargetTxtClassObject import TargetTxtDict
+from Common.ToolDefClassObject import ToolDefDict
+from buildoptions import MyOptionParser
+from Common.Misc import PathClass,SaveFileOnChange,RemoveDirectory
+from Common.StringUtils import NormPath
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common.BuildToolError import *
+from Common.DataType import *
+import Common.EdkLogger as EdkLogger
+
+from Workspace.WorkspaceDatabase import BuildDB
+
+from BuildReport import BuildReport
+from GenPatchPcdTable.GenPatchPcdTable import PeImageClass,parsePcdInfoFromMapFile
+from PatchPcdValue.PatchPcdValue import PatchBinaryFile
+
+import Common.GlobalData as GlobalData
+from GenFds.GenFds import GenFds, GenFdsApi
+import multiprocessing as mp
+from multiprocessing import Manager
+from AutoGen.DataPipe import MemoryDataPipe
+from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo, PlatformInfo
+from GenFds.FdfParser import FdfParser
+from AutoGen.IncludesAutoGen import IncludesAutoGen
+from GenFds.GenFds import resetFdsGlobalVariable
+from AutoGen.AutoGen import CalculatePriorityValue
+
+## standard targets of build command
+gSupportedTarget = ['all', 'genc', 'genmake', 'modules', 'libraries', 'fds', 'clean', 'cleanall', 'cleanlib', 'run']
+
+## build configuration file
+gBuildConfiguration = "target.txt"
+gToolsDefinition = "tools_def.txt"
+
+TemporaryTablePattern = re.compile(r'^_\d+_\d+_[a-fA-F0-9]+$')
+TmpTableDict = {}
+
+## Check environment PATH variable to make sure the specified tool is found
+#
+# If the tool is found in the PATH, then True is returned;
+# otherwise, False is returned.
+#
+def IsToolInPath(tool):
+ if 'PATHEXT' in os.environ:
+ extns = os.environ['PATHEXT'].split(os.path.pathsep)
+ else:
+ extns = ('',)
+ for pathDir in os.environ['PATH'].split(os.path.pathsep):
+ for ext in extns:
+ if os.path.exists(os.path.join(pathDir, tool + ext)):
+ return True
+ return False
+
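+# Example (illustrative): IsToolInPath('make') is True only when 'make' (or,
+# on Windows, a PATHEXT variant such as 'make.exe') exists in some PATH entry:
+#
+#   if not IsToolInPath('nmake'):
+#       EdkLogger.warn(None, "nmake is not found in PATH")  # hypothetical handling
+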
+## Check environment variables
+#
+# Check environment variables that must be set for build. Currently they are
+#
+#   WORKSPACE           The directory from which all packages/platforms start
+#   EDK_TOOLS_PATH      The directory that contains all tools needed by the build
+#   PATH                $(EDK_TOOLS_PATH)/Bin/<sys> must be set in PATH
+#
+# If any of the above environment variables is not set or has an error, the
+# build will be broken.
+#
+def CheckEnvVariable():
+ # check WORKSPACE
+ if "WORKSPACE" not in os.environ:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
+ ExtraData="WORKSPACE")
+
+ WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
+ if not os.path.exists(WorkspaceDir):
+ EdkLogger.error("build", FILE_NOT_FOUND, "WORKSPACE doesn't exist", ExtraData=WorkspaceDir)
+ elif ' ' in WorkspaceDir:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path",
+ ExtraData=WorkspaceDir)
+ os.environ["WORKSPACE"] = WorkspaceDir
+
+ # set multiple workspace
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(WorkspaceDir, PackagesPath)
+ if mws.PACKAGES_PATH:
+ for Path in mws.PACKAGES_PATH:
+ if not os.path.exists(Path):
+ EdkLogger.error("build", FILE_NOT_FOUND, "One Path in PACKAGES_PATH doesn't exist", ExtraData=Path)
+ elif ' ' in Path:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in PACKAGES_PATH", ExtraData=Path)
+
+
+    # check EDK_TOOLS_PATH
+    if "EDK_TOOLS_PATH" not in os.environ:
+        EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
+                        ExtraData="EDK_TOOLS_PATH")
+
+    os.environ["EDK_TOOLS_PATH"] = os.path.normcase(os.environ["EDK_TOOLS_PATH"])
+
+ # check PATH
+ if "PATH" not in os.environ:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
+ ExtraData="PATH")
+
+ GlobalData.gWorkspace = WorkspaceDir
+
+ GlobalData.gGlobalDefines["WORKSPACE"] = WorkspaceDir
+ GlobalData.gGlobalDefines["EDK_TOOLS_PATH"] = os.environ["EDK_TOOLS_PATH"]
+
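+# Illustrative sketch (paths are hypothetical): the minimal environment that
+# CheckEnvVariable() expects, normally prepared by edksetup.bat/edksetup.sh
+# rather than set by hand:
+#
+#   os.environ["WORKSPACE"]      = "C:\\edk2"             # no spaces allowed
+#   os.environ["EDK_TOOLS_PATH"] = "C:\\edk2\\BaseTools"
+#   os.environ["PATH"]          += os.pathsep + "C:\\edk2\\BaseTools\\Bin\\Win32"
+#   CheckEnvVariable()  # raises a FatalError via EdkLogger.error() on any problem
+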
+## Get normalized file path
+#
+# Convert the path to local format, and remove the WORKSPACE path from the
+# beginning if the file path is given as a full path.
+#
+# @param FilePath File path to be normalized
+# @param Workspace Workspace path which the FilePath will be checked against
+#
+# @retval string The normalized file path
+#
+def NormFile(FilePath, Workspace):
+ # check if the path is absolute or relative
+ if os.path.isabs(FilePath):
+ FileFullPath = os.path.normpath(FilePath)
+ else:
+ FileFullPath = os.path.normpath(mws.join(Workspace, FilePath))
+ Workspace = mws.getWs(Workspace, FilePath)
+
+ # check if the file path exists or not
+ if not os.path.isfile(FileFullPath):
+ EdkLogger.error("build", FILE_NOT_FOUND, ExtraData="\t%s (Please give file in absolute path or relative to WORKSPACE)" % FileFullPath)
+
+ # remove workspace directory from the beginning part of the file path
+ if Workspace[-1] in ["\\", "/"]:
+ return FileFullPath[len(Workspace):]
+ else:
+ return FileFullPath[(len(Workspace) + 1):]
+
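+# Illustrative sketch (hypothetical paths): NormFile() strips the workspace
+# prefix, so both spellings below yield the same workspace-relative path as
+# long as the file exists:
+#
+#   NormFile("MdePkg/MdePkg.dec", "C:\\edk2")             -> "MdePkg\\MdePkg.dec"
+#   NormFile("C:\\edk2\\MdePkg\\MdePkg.dec", "C:\\edk2")  -> "MdePkg\\MdePkg.dec"
+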
+## Get the output of an external program
+#
+# This is the entry method of a thread that reads the output of an external
+# program and forwards it to the STDOUT/STDERR of the current program.
+#
+# @param From The stream message read from
+# @param To The stream message put on
+# @param ExitFlag The flag used to indicate stopping reading
+# @param MemTo Optional list that additionally collects the output lines
+#
+def ReadMessage(From, To, ExitFlag,MemTo=None):
+ while True:
+ # read one line a time
+ Line = From.readline()
+ # empty string means "end"
+ if Line is not None and Line != b"":
+ LineStr = Line.rstrip().decode(encoding='utf-8', errors='ignore')
+ if MemTo is not None:
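+                # MSVC cl.exe run with /showIncludes prefixes each opened header
+                # with "Note: including file:"; those lines are collected for
+                # include-dependency generation instead of being echoed.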
+ if "Note: including file:" == LineStr.lstrip()[:21]:
+ MemTo.append(LineStr)
+ else:
+ To(LineStr)
+ MemTo.append(LineStr)
+ else:
+ To(LineStr)
+ else:
+ break
+ if ExitFlag.isSet():
+ break
+
+class MakeSubProc(Popen):
+ def __init__(self,*args, **argv):
+ super(MakeSubProc,self).__init__(*args, **argv)
+ self.ProcOut = []
+
+## Launch an external program
+#
+# This method will call subprocess.Popen to execute an external program with
+# the given options in the specified directory. Because of the dead-lock issue
+# when redirecting the output of the external program, threads are used to do
+# the redirection work.
+#
+# @param Command A list or string containing the call of the program
+# @param WorkingDir The directory in which the program will be running
+# @param ModuleAuto Optional ModuleAutoGen object used to generate include-dependency files
+#
+def LaunchCommand(Command, WorkingDir,ModuleAuto = None):
+ BeginTime = time.time()
+ # if working directory doesn't exist, Popen() will raise an exception
+ if not os.path.isdir(WorkingDir):
+ EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=WorkingDir)
+
+    # Command is used as the first argument in the following Popen().
+    # It could be a string or a sequence. We find that if the command is a string,
+    # Ubuntu may fail with an error message that the command is not found.
+    # So here we may need to convert the command from a string to a list.
+ if platform.system() != 'Windows':
+ if not isinstance(Command, list):
+ Command = Command.split()
+ Command = ' '.join(Command)
+
+ if platform.system() != 'Windows':
+ EdkLogger.info("Launching: '%s'; CWD=%s" % (Command, WorkingDir));
+ else:
+ EdkLogger.info("Launching: '%s'; CWD=%s" % ("' '".join(Command), WorkingDir));
+ Proc = None
+ EndOfProcedure = None
+ try:
+ # launch the command
+ Proc = MakeSubProc(Command, stdout=PIPE, stderr=STDOUT, env=os.environ, cwd=WorkingDir, bufsize=-1, shell=True)
+
+ # launch two threads to read the STDOUT and STDERR
+ EndOfProcedure = Event()
+ EndOfProcedure.clear()
+ if Proc.stdout:
+ StdOutThread = Thread(target=ReadMessage, args=(Proc.stdout, EdkLogger.info, EndOfProcedure,Proc.ProcOut))
+ StdOutThread.setName("STDOUT-Redirector")
+ StdOutThread.setDaemon(False)
+ StdOutThread.start()
+
+
+ # waiting for program exit
+ Proc.wait()
+ except: # in case of aborting
+ # terminate the threads redirecting the program output
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ if EndOfProcedure is not None:
+ EndOfProcedure.set()
+ if Proc is None:
+ if not isinstance(Command, type("")):
+ Command = " ".join(Command)
+ EdkLogger.error("build", COMMAND_FAILURE, "Failed to start command", ExtraData="%s [%s]" % (Command, WorkingDir))
+
+ if Proc.stdout:
+ StdOutThread.join()
+
+ # check the return code of the program
+ if Proc.returncode != 0:
+ if not isinstance(Command, type("")):
+ Command = " ".join(Command)
+ # print out the Response file and its content when make failure
+ RespFile = os.path.join(WorkingDir, 'OUTPUT', 'respfilelist.txt')
+ if os.path.isfile(RespFile):
+ f = open(RespFile)
+ RespContent = f.read()
+ f.close()
+ EdkLogger.info(RespContent)
+
+ EdkLogger.error("build", COMMAND_FAILURE, ExtraData="%s [%s]" % (Command, WorkingDir))
+ if ModuleAuto:
+ iau = IncludesAutoGen(WorkingDir,ModuleAuto)
+ if ModuleAuto.ToolChainFamily == TAB_COMPILER_MSFT:
+ iau.CreateDepsFileForMsvc(Proc.ProcOut)
+ else:
+ iau.UpdateDepsFileforNonMsvc()
+ iau.UpdateDepsFileforTrim()
+ iau.CreateModuleDeps()
+ iau.CreateDepsInclude()
+ iau.CreateDepsTarget()
+ return "%dms" % (int(round((time.time() - BeginTime) * 1000)))
+
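+# Illustrative sketch (not called by the build itself): a typical
+# LaunchCommand() invocation. The command and directory are hypothetical;
+# the return value is the elapsed wall time as a string such as "1234ms".
+def _ExampleLaunchMake(MakefileDir):
+    BuildTime = LaunchCommand(["nmake", "tbuild"], MakefileDir)
+    EdkLogger.info("make finished in %s" % BuildTime)
+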
+## The smallest unit that can be built in multi-thread build mode
+#
+# This is the base class of build unit. The "Obj" parameter must provide
+# __str__(), __eq__() and __hash__() methods. Otherwise some build units could
+# be missed during the build.
+#
+# Currently the "Obj" should be only ModuleAutoGen or PlatformAutoGen objects.
+#
+class BuildUnit:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Obj The object the build is working on
+    # @param BuildCommand The make command line (a list) used to build the object
+    # @param Target The build target name, one of gSupportedTarget
+ # @param Dependency The BuildUnit(s) which must be completed in advance
+ # @param WorkingDir The directory build command starts in
+ #
+ def __init__(self, Obj, BuildCommand, Target, Dependency, WorkingDir="."):
+ self.BuildObject = Obj
+ self.Dependency = Dependency
+ self.WorkingDir = WorkingDir
+ self.Target = Target
+ self.BuildCommand = BuildCommand
+ if not BuildCommand:
+ EdkLogger.error("build", OPTION_MISSING,
+ "No build command found for this module. "
+ "Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
+ (Obj.BuildTarget, Obj.ToolChain, Obj.Arch),
+ ExtraData=str(Obj))
+
+
+ ## str() method
+ #
+ # It just returns the string representation of self.BuildObject
+ #
+ # @param self The object pointer
+ #
+ def __str__(self):
+ return str(self.BuildObject)
+
+ ## "==" operator method
+ #
+ # It just compares self.BuildObject with "Other". So self.BuildObject must
+ # provide its own __eq__() method.
+ #
+ # @param self The object pointer
+ # @param Other The other BuildUnit object compared to
+ #
+ def __eq__(self, Other):
+ return Other and self.BuildObject == Other.BuildObject \
+ and Other.BuildObject \
+ and self.BuildObject.Arch == Other.BuildObject.Arch
+
+ ## hash() method
+ #
+ # It just returns the hash value of self.BuildObject which must be hashable.
+ #
+ # @param self The object pointer
+ #
+ def __hash__(self):
+ return hash(self.BuildObject) + hash(self.BuildObject.Arch)
+
+ def __repr__(self):
+ return repr(self.BuildObject)
+
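+# Illustrative sketch of the duck-typing contract described above: any "Obj"
+# handed to BuildUnit must provide __str__(), __eq__() and __hash__() (and an
+# Arch attribute). _FakeAutoGen below is purely hypothetical and exists only
+# to demonstrate the required interface.
+class _FakeAutoGen:
+    def __init__(self, Name, Arch):
+        self.Name = Name
+        self.Arch = Arch
+    def __str__(self):
+        return self.Name
+    def __eq__(self, Other):
+        return bool(Other) and self.Name == Other.Name
+    def __hash__(self):
+        return hash(self.Name)
+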
+## The smallest module unit that can be built by nmake/make command in multi-thread build mode
+#
+# This class is for module build by nmake/make build system. The "Obj" parameter
+# must provide __str__(), __eq__() and __hash__() methods. Otherwise some make
+# units could be missed during the build.
+#
+# Currently the "Obj" should be only ModuleAutoGen object.
+#
+class ModuleMakeUnit(BuildUnit):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Obj The ModuleAutoGen object the build is working on
+ # @param Target The build target name, one of gSupportedTarget
+ #
+ def __init__(self, Obj, BuildCommand,Target):
+ Dependency = [ModuleMakeUnit(La, BuildCommand,Target) for La in Obj.LibraryAutoGenList]
+ BuildUnit.__init__(self, Obj, BuildCommand, Target, Dependency, Obj.MakeFileDir)
+ if Target in [None, "", "all"]:
+ self.Target = "tbuild"
+
+## The smallest platform unit that can be built by nmake/make command in multi-thread build mode
+#
+# This class is for platform build by nmake/make build system. The "Obj" parameter
+# must provide __str__(), __eq__() and __hash__() methods. Otherwise some make
+# units could be missed during the build.
+#
+# Currently the "Obj" should be only PlatformAutoGen object.
+#
+class PlatformMakeUnit(BuildUnit):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Obj The PlatformAutoGen object the build is working on
+ # @param Target The build target name, one of gSupportedTarget
+ #
+ def __init__(self, Obj, BuildCommand, Target):
+        # self.BuildObject is not set until BuildUnit.__init__() runs, so use Obj directly
+        Dependency = [ModuleMakeUnit(Lib, BuildCommand, Target) for Lib in Obj.LibraryAutoGenList]
+        Dependency.extend([ModuleMakeUnit(Mod, BuildCommand, Target) for Mod in Obj.ModuleAutoGenList])
+ BuildUnit.__init__(self, Obj, BuildCommand, Target, Dependency, Obj.MakeFileDir)
+
+## The class representing the task of a module build or platform build
+#
+# This class manages the build tasks in multi-thread build mode. Its jobs include
+# scheduling thread running, catching thread error, monitor the thread status, etc.
+#
+class BuildTask:
+ # queue for tasks waiting for schedule
+ _PendingQueue = OrderedDict()
+ _PendingQueueLock = threading.Lock()
+
+ # queue for tasks ready for running
+ _ReadyQueue = OrderedDict()
+ _ReadyQueueLock = threading.Lock()
+
+    # queue for running tasks
+ _RunningQueue = OrderedDict()
+ _RunningQueueLock = threading.Lock()
+
+    # queue containing all build tasks, to avoid duplicate builds
+ _TaskQueue = OrderedDict()
+
+    # flag indicating an error occurred in a running thread
+ _ErrorFlag = threading.Event()
+ _ErrorFlag.clear()
+ _ErrorMessage = ""
+
+ # BoundedSemaphore object used to control the number of running threads
+ _Thread = None
+
+ # flag indicating if the scheduler is started or not
+ _SchedulerStopped = threading.Event()
+ _SchedulerStopped.set()
+
+ ## Start the task scheduler thread
+ #
+ # @param MaxThreadNumber The maximum thread number
+ # @param ExitFlag Flag used to end the scheduler
+ #
+ @staticmethod
+ def StartScheduler(MaxThreadNumber, ExitFlag):
+ SchedulerThread = Thread(target=BuildTask.Scheduler, args=(MaxThreadNumber, ExitFlag))
+ SchedulerThread.setName("Build-Task-Scheduler")
+ SchedulerThread.setDaemon(False)
+ SchedulerThread.start()
+ # wait for the scheduler to be started, especially useful in Linux
+ while not BuildTask.IsOnGoing():
+ time.sleep(0.01)
+
+ ## Scheduler method
+ #
+ # @param MaxThreadNumber The maximum thread number
+ # @param ExitFlag Flag used to end the scheduler
+ #
+ @staticmethod
+ def Scheduler(MaxThreadNumber, ExitFlag):
+ BuildTask._SchedulerStopped.clear()
+ try:
+ # use BoundedSemaphore to control the maximum running threads
+ BuildTask._Thread = BoundedSemaphore(MaxThreadNumber)
+ #
+            # scheduling loop, which exits when there are no pending/ready tasks
+            # and the exit flag is set, or when an error occurs in a running thread
+ #
+ while (len(BuildTask._PendingQueue) > 0 or len(BuildTask._ReadyQueue) > 0 \
+ or not ExitFlag.isSet()) and not BuildTask._ErrorFlag.isSet():
+ EdkLogger.debug(EdkLogger.DEBUG_8, "Pending Queue (%d), Ready Queue (%d)"
+ % (len(BuildTask._PendingQueue), len(BuildTask._ReadyQueue)))
+
+ # get all pending tasks
+ BuildTask._PendingQueueLock.acquire()
+ BuildObjectList = list(BuildTask._PendingQueue.keys())
+ #
+ # check if their dependency is resolved, and if true, move them
+ # into ready queue
+ #
+ for BuildObject in BuildObjectList:
+ Bt = BuildTask._PendingQueue[BuildObject]
+ if Bt.IsReady():
+ BuildTask._ReadyQueue[BuildObject] = BuildTask._PendingQueue.pop(BuildObject)
+ BuildTask._PendingQueueLock.release()
+
+ # launch build thread until the maximum number of threads is reached
+ while not BuildTask._ErrorFlag.isSet():
+ # empty ready queue, do nothing further
+ if len(BuildTask._ReadyQueue) == 0:
+ break
+
+                    # wait until a running thread exits and frees a slot
+ BuildTask._Thread.acquire(True)
+
+ # start a new build thread
+ Bo, Bt = BuildTask._ReadyQueue.popitem()
+
+ # move into running queue
+ BuildTask._RunningQueueLock.acquire()
+ BuildTask._RunningQueue[Bo] = Bt
+ BuildTask._RunningQueueLock.release()
+
+ Bt.Start()
+ # avoid tense loop
+ time.sleep(0.01)
+
+ # avoid tense loop
+ time.sleep(0.01)
+
+            # wait for all running threads to exit
+ if BuildTask._ErrorFlag.isSet():
+ EdkLogger.quiet("\nWaiting for all build threads exit...")
+ # while not BuildTask._ErrorFlag.isSet() and \
+ while len(BuildTask._RunningQueue) > 0:
+ EdkLogger.verbose("Waiting for thread ending...(%d)" % len(BuildTask._RunningQueue))
+ EdkLogger.debug(EdkLogger.DEBUG_8, "Threads [%s]" % ", ".join(Th.getName() for Th in threading.enumerate()))
+ # avoid tense loop
+ time.sleep(0.1)
+ except BaseException as X:
+ #
+ # TRICK: hide the output of threads left running, so that the user can
+ # catch the error message easily
+ #
+ EdkLogger.SetLevel(EdkLogger.ERROR)
+ BuildTask._ErrorFlag.set()
+ BuildTask._ErrorMessage = "build thread scheduler error\n\t%s" % str(X)
+
+ BuildTask._PendingQueue.clear()
+ BuildTask._ReadyQueue.clear()
+ BuildTask._RunningQueue.clear()
+ BuildTask._TaskQueue.clear()
+ BuildTask._SchedulerStopped.set()
+
+    ## Wait for all running tasks to exit
+ #
+ @staticmethod
+ def WaitForComplete():
+ BuildTask._SchedulerStopped.wait()
+
+ ## Check if the scheduler is running or not
+ #
+ @staticmethod
+ def IsOnGoing():
+ return not BuildTask._SchedulerStopped.isSet()
+
+ ## Abort the build
+ @staticmethod
+ def Abort():
+ if BuildTask.IsOnGoing():
+ BuildTask._ErrorFlag.set()
+ BuildTask.WaitForComplete()
+
+    ## Check if there's an error in a running thread
+    #
+    # Since the main thread cannot catch exceptions in other threads, we have to
+    # use threading.Event to communicate this information to the main thread.
+ #
+ @staticmethod
+ def HasError():
+ return BuildTask._ErrorFlag.isSet()
+
+ ## Get error message in running thread
+ #
+    # Since the main thread cannot catch exceptions in other threads, we have to
+    # use a static variable to communicate this message to the main thread.
+ #
+ @staticmethod
+ def GetErrorMessage():
+ return BuildTask._ErrorMessage
+
+ ## Factory method to create a BuildTask object
+ #
+    # This method checks whether a module is being built or has already been
+    # built, and if so returns the associated BuildTask object from _TaskQueue.
+    # Otherwise it creates a new BuildTask object, appends it to _PendingQueue
+    # for later scheduling, and returns it.
+ #
+ # @param BuildItem A BuildUnit object representing a build object
+ # @param Dependency The dependent build object of BuildItem
+ #
+ @staticmethod
+ def New(BuildItem, Dependency=None):
+ if BuildItem in BuildTask._TaskQueue:
+ Bt = BuildTask._TaskQueue[BuildItem]
+ return Bt
+
+ Bt = BuildTask()
+ Bt._Init(BuildItem, Dependency)
+ BuildTask._TaskQueue[BuildItem] = Bt
+
+ BuildTask._PendingQueueLock.acquire()
+ BuildTask._PendingQueue[BuildItem] = Bt
+ BuildTask._PendingQueueLock.release()
+
+ return Bt
+
+ ## The real constructor of BuildTask
+ #
+ # @param BuildItem A BuildUnit object representing a build object
+ # @param Dependency The dependent build object of BuildItem
+ #
+ def _Init(self, BuildItem, Dependency=None):
+ self.BuildItem = BuildItem
+
+ self.DependencyList = []
+ if Dependency is None:
+ Dependency = BuildItem.Dependency
+ else:
+ Dependency.extend(BuildItem.Dependency)
+ self.AddDependency(Dependency)
+ # flag indicating build completes, used to avoid unnecessary re-build
+ self.CompleteFlag = False
+
+ ## Check if all dependent build tasks are completed or not
+ #
+ def IsReady(self):
+ ReadyFlag = True
+ for Dep in self.DependencyList:
+ if Dep.CompleteFlag == True:
+ continue
+ ReadyFlag = False
+ break
+
+ return ReadyFlag
+
+ ## Add dependent build task
+ #
+ # @param Dependency The list of dependent build objects
+ #
+ def AddDependency(self, Dependency):
+ for Dep in Dependency:
+ if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gModuleCacheHit):
+ self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list
+
+ ## The thread wrapper of LaunchCommand function
+ #
+ # @param Command A list or string contains the call of the command
+ # @param WorkingDir The directory in which the program will be running
+ #
+ def _CommandThread(self, Command, WorkingDir):
+ try:
+ self.BuildItem.BuildObject.BuildTime = LaunchCommand(Command, WorkingDir,self.BuildItem.BuildObject)
+ self.CompleteFlag = True
+
+ # Run hash operation post dependency to account for libs
+ # Run if --hash or --binary-destination
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ self.BuildItem.BuildObject.GenModuleHash()
+ if GlobalData.gBinCacheDest:
+ self.BuildItem.BuildObject.GenCMakeHash()
+
+ except:
+ #
+ # TRICK: hide the output of threads left running, so that the user can
+ # catch the error message easily
+ #
+ if not BuildTask._ErrorFlag.isSet():
+ GlobalData.gBuildingModule = "%s [%s, %s, %s]" % (str(self.BuildItem.BuildObject),
+ self.BuildItem.BuildObject.Arch,
+ self.BuildItem.BuildObject.ToolChain,
+ self.BuildItem.BuildObject.BuildTarget
+ )
+ EdkLogger.SetLevel(EdkLogger.ERROR)
+ BuildTask._ErrorFlag.set()
+ BuildTask._ErrorMessage = "%s broken\n %s [%s]" % \
+ (threading.currentThread().getName(), Command, WorkingDir)
+
+        # indicate a thread is now available for another build task
+ BuildTask._RunningQueueLock.acquire()
+ BuildTask._RunningQueue.pop(self.BuildItem)
+ BuildTask._RunningQueueLock.release()
+ BuildTask._Thread.release()
+
+ ## Start build task thread
+ #
+ def Start(self):
+ EdkLogger.quiet("Building ... %s" % repr(self.BuildItem))
+ Command = self.BuildItem.BuildCommand + [self.BuildItem.Target]
+ self.BuildTread = Thread(target=self._CommandThread, args=(Command, self.BuildItem.WorkingDir))
+ self.BuildTread.setName("build thread")
+ self.BuildTread.setDaemon(False)
+ self.BuildTread.start()
+
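+# Illustrative sketch (hypothetical driver code, not executed here): the
+# typical BuildTask lifecycle - start the scheduler, queue the make units,
+# signal that no more tasks will come, then wait and check for errors.
+def _ExampleRunBuildTasks(MakeUnits, ThreadNumber):
+    ExitFlag = threading.Event()
+    ExitFlag.clear()
+    BuildTask.StartScheduler(ThreadNumber, ExitFlag)
+    for Unit in MakeUnits:
+        BuildTask.New(Unit)      # dependencies are queued recursively
+    ExitFlag.set()               # tell the scheduler no more tasks are coming
+    BuildTask.WaitForComplete()  # block until the scheduler stops
+    if BuildTask.HasError():
+        EdkLogger.error("build", UNKNOWN_ERROR, BuildTask.GetErrorMessage())
+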
+## The class contains the information related to EFI image
+#
+class PeImageInfo():
+ ## Constructor
+ #
+ # Constructor will load all required image information.
+ #
+ # @param BaseName The full file path of image.
+ # @param Guid The GUID for image.
+ # @param Arch Arch of this image.
+ # @param OutputDir The output directory for image.
+ # @param DebugDir The debug directory for image.
+ # @param ImageClass PeImage Information
+ #
+ def __init__(self, BaseName, Guid, Arch, OutputDir, DebugDir, ImageClass):
+ self.BaseName = BaseName
+ self.Guid = Guid
+ self.Arch = Arch
+ self.OutputDir = OutputDir
+ self.DebugDir = DebugDir
+ self.Image = ImageClass
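+        # round the image size up to the next 4 KB (0x1000) page boundary; note
+        # that this adds a full extra page when the size is already page-aligned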
+ self.Image.Size = (self.Image.Size // 0x1000 + 1) * 0x1000
+
+## The class implementing the EDK2 build process
+#
+# The build process includes:
+# 1. Load configuration from target.txt and tools_def.txt in $(WORKSPACE)/Conf
+# 2. Parse DSC file of active platform
+# 3. Parse FDF file if any
+# 4. Establish build database, including parse all other files (module, package)
+# 5. Create AutoGen files (C code file, depex file, makefile) if necessary
+# 6. Call build command
+#
+class Build():
+ ## Constructor
+ #
+    # Constructor will load all necessary configurations, parse the platform,
+    # modules and packages, and then establish a database for AutoGen.
+ #
+ # @param Target The build command target, one of gSupportedTarget
+ # @param WorkspaceDir The directory of workspace
+ # @param BuildOptions Build options passed from command line
+    # @param BuildOptions Build options passed from command line
+    # @param log_q The queue used to pass log messages between build processes
+ def __init__(self, Target, WorkspaceDir, BuildOptions,log_q):
+ self.WorkspaceDir = WorkspaceDir
+ self.Target = Target
+ self.PlatformFile = BuildOptions.PlatformFile
+ self.ModuleFile = BuildOptions.ModuleFile
+ self.ArchList = BuildOptions.TargetArch
+ self.ToolChainList = BuildOptions.ToolChain
+ self.BuildTargetList= BuildOptions.BuildTarget
+ self.Fdf = BuildOptions.FdfFile
+ self.FdList = BuildOptions.RomImage
+ self.FvList = BuildOptions.FvImage
+ self.CapList = BuildOptions.CapName
+ self.SilentMode = BuildOptions.SilentMode
+ self.ThreadNumber = 1
+ self.SkipAutoGen = BuildOptions.SkipAutoGen
+ self.Reparse = BuildOptions.Reparse
+ self.SkuId = BuildOptions.SkuId
+ if self.SkuId:
+ GlobalData.gSKUID_CMD = self.SkuId
+ self.ConfDirectory = BuildOptions.ConfDirectory
+ self.SpawnMode = True
+ self.BuildReport = BuildReport(BuildOptions.ReportFile, BuildOptions.ReportType)
+ self.AutoGenTime = 0
+ self.MakeTime = 0
+ self.GenFdsTime = 0
+ self.MakeFileName = ""
+ TargetObj = TargetTxtDict()
+ ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"),"Conf")))
+ self.TargetTxt = TargetObj.Target
+ self.ToolDef = ToolDefObj.ToolDef
+ GlobalData.BuildOptionPcd = BuildOptions.OptionPcd if BuildOptions.OptionPcd else []
+ #Set global flag for build mode
+ GlobalData.gIgnoreSource = BuildOptions.IgnoreSources
+ GlobalData.gUseHashCache = BuildOptions.UseHashCache
+ GlobalData.gBinCacheDest = BuildOptions.BinCacheDest
+ GlobalData.gBinCacheSource = BuildOptions.BinCacheSource
+ GlobalData.gEnableGenfdsMultiThread = not BuildOptions.NoGenfdsMultiThread
+ GlobalData.gDisableIncludePathCheck = BuildOptions.DisableIncludePathCheck
+
+ if GlobalData.gBinCacheDest and not GlobalData.gUseHashCache:
+ EdkLogger.error("build", OPTION_NOT_SUPPORTED, ExtraData="--binary-destination must be used together with --hash.")
+
+ if GlobalData.gBinCacheSource and not GlobalData.gUseHashCache:
+ EdkLogger.error("build", OPTION_NOT_SUPPORTED, ExtraData="--binary-source must be used together with --hash.")
+
+ if GlobalData.gBinCacheDest and GlobalData.gBinCacheSource:
+ EdkLogger.error("build", OPTION_NOT_SUPPORTED, ExtraData="--binary-destination can not be used together with --binary-source.")
+
+ if GlobalData.gBinCacheSource:
+ BinCacheSource = os.path.normpath(GlobalData.gBinCacheSource)
+ if not os.path.isabs(BinCacheSource):
+ BinCacheSource = mws.join(self.WorkspaceDir, BinCacheSource)
+ GlobalData.gBinCacheSource = BinCacheSource
+ else:
+ if GlobalData.gBinCacheSource is not None:
+ EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-source.")
+
+ if GlobalData.gBinCacheDest:
+ BinCacheDest = os.path.normpath(GlobalData.gBinCacheDest)
+ if not os.path.isabs(BinCacheDest):
+ BinCacheDest = mws.join(self.WorkspaceDir, BinCacheDest)
+ GlobalData.gBinCacheDest = BinCacheDest
+ else:
+ if GlobalData.gBinCacheDest is not None:
+ EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-destination.")
+
+ GlobalData.gDatabasePath = os.path.normpath(os.path.join(GlobalData.gConfDirectory, GlobalData.gDatabasePath))
+ if not os.path.exists(os.path.join(GlobalData.gConfDirectory, '.cache')):
+ os.makedirs(os.path.join(GlobalData.gConfDirectory, '.cache'))
+ self.Db = BuildDB
+ self.BuildDatabase = self.Db.BuildObject
+ self.Platform = None
+ self.ToolChainFamily = None
+ self.LoadFixAddress = 0
+ self.UniFlag = BuildOptions.Flag
+ self.BuildModules = []
+ self.HashSkipModules = []
+ self.Db_Flag = False
+ self.LaunchPrebuildFlag = False
+ self.PlatformBuildPath = os.path.join(GlobalData.gConfDirectory, '.cache', '.PlatformBuild')
+ if BuildOptions.CommandLength:
+ GlobalData.gCommandMaxLength = BuildOptions.CommandLength
+
+        # print dot characters while doing time-consuming work
+ self.Progress = Utils.Progressor()
+ # print current build environment and configuration
+ EdkLogger.quiet("%-16s = %s" % ("WORKSPACE", os.environ["WORKSPACE"]))
+ if "PACKAGES_PATH" in os.environ:
+ # WORKSPACE env has been converted before. Print the same path style with WORKSPACE env.
+ EdkLogger.quiet("%-16s = %s" % ("PACKAGES_PATH", os.path.normcase(os.path.normpath(os.environ["PACKAGES_PATH"]))))
+ EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_PATH", os.environ["EDK_TOOLS_PATH"]))
+ if "EDK_TOOLS_BIN" in os.environ:
+ # Print the same path style with WORKSPACE env.
+ EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_BIN", os.path.normcase(os.path.normpath(os.environ["EDK_TOOLS_BIN"]))))
+ EdkLogger.quiet("%-16s = %s" % ("CONF_PATH", GlobalData.gConfDirectory))
+ if "PYTHON3_ENABLE" in os.environ:
+ PYTHON3_ENABLE = os.environ["PYTHON3_ENABLE"]
+ if PYTHON3_ENABLE != "TRUE":
+ PYTHON3_ENABLE = "FALSE"
+ EdkLogger.quiet("%-16s = %s" % ("PYTHON3_ENABLE", PYTHON3_ENABLE))
+ if "PYTHON_COMMAND" in os.environ:
+ EdkLogger.quiet("%-16s = %s" % ("PYTHON_COMMAND", os.environ["PYTHON_COMMAND"]))
+ self.InitPreBuild()
+ self.InitPostBuild()
+ if self.Prebuild:
+ EdkLogger.quiet("%-16s = %s" % ("PREBUILD", self.Prebuild))
+ if self.Postbuild:
+ EdkLogger.quiet("%-16s = %s" % ("POSTBUILD", self.Postbuild))
+ if self.Prebuild:
+ self.LaunchPrebuild()
+ TargetObj = TargetTxtDict()
+ ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
+ self.TargetTxt = TargetObj.Target
+ self.ToolDef = ToolDefObj.ToolDef
+ if not (self.LaunchPrebuildFlag and os.path.exists(self.PlatformBuildPath)):
+ self.InitBuild()
+
+ self.AutoGenMgr = None
+ EdkLogger.info("")
+ os.chdir(self.WorkspaceDir)
+ self.log_q = log_q
+ GlobalData.file_lock = mp.Lock()
+ # Init cache data for local only
+ GlobalData.gPackageHashFile = dict()
+ GlobalData.gModulePreMakeCacheStatus = dict()
+ GlobalData.gModuleMakeCacheStatus = dict()
+ GlobalData.gHashChainStatus = dict()
+ GlobalData.gCMakeHashFile = dict()
+ GlobalData.gModuleHashFile = dict()
+ GlobalData.gFileHashDict = dict()
+ GlobalData.gModuleAllCacheStatus = set()
+ GlobalData.gModuleCacheHit = set()
+
+ def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,cqueue):
+ try:
+ if SkipAutoGen:
+ return True,0
+ feedback_q = mp.Queue()
+ error_event = mp.Event()
+ FfsCmd = DataPipe.Get("FfsCommand")
+ if FfsCmd is None:
+ FfsCmd = {}
+ GlobalData.FfsCmd = FfsCmd
+ auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,GlobalData.file_lock,cqueue,self.log_q,error_event) for _ in range(self.ThreadNumber)]
+ self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)
+ self.AutoGenMgr.start()
+ for w in auto_workers:
+ w.start()
+ if PcdMaList is not None:
+ for PcdMa in PcdMaList:
+                    # The SourceFileList calling sequence impacts the makefile string sequence.
+ # Create cached SourceFileList here to unify its calling sequence for both
+ # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
+ RetVal = PcdMa.SourceFileList
+ # Force cache miss for PCD driver
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:
+ cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "PreMakeCache", False))
+
+ PcdMa.CreateCodeFile(False)
+ PcdMa.CreateMakeFile(False,GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.Path, PcdMa.Arch),[]))
+ PcdMa.CreateAsBuiltInf()
+ # Force cache miss for PCD driver
+ if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
+ cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "MakeCache", False))
+
+ self.AutoGenMgr.join()
+ rt = self.AutoGenMgr.Status
+ err = 0
+ if not rt:
+ err = UNKNOWN_ERROR
+ return rt, err
+ except FatalError as e:
+ return False, e.args[0]
+ except:
+ return False, UNKNOWN_ERROR
+
+ ## Add TOOLCHAIN and FAMILY declared in DSC [BuildOptions] to ToolsDefTxtDatabase.
+ #
+ # Loop through the set of build targets, tool chains, and archs provided on either
+    # the command line or in target.txt to discover FAMILY and TOOLCHAIN declarations
+    # in [BuildOptions] sections, which may be within !if expressions that use
+ # $(TARGET), $(TOOLCHAIN), $(TOOLCHAIN_TAG), or $(ARCH) operands.
+ #
+ def GetToolChainAndFamilyFromDsc (self, File):
+ SavedGlobalDefines = GlobalData.gGlobalDefines.copy()
+ for BuildTarget in self.BuildTargetList:
+ GlobalData.gGlobalDefines['TARGET'] = BuildTarget
+ for BuildToolChain in self.ToolChainList:
+ GlobalData.gGlobalDefines['TOOLCHAIN'] = BuildToolChain
+ GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = BuildToolChain
+ for BuildArch in self.ArchList:
+ GlobalData.gGlobalDefines['ARCH'] = BuildArch
+ dscobj = self.BuildDatabase[File, BuildArch]
+ for KeyFamily, Key, KeyCodeBase in dscobj.BuildOptions:
+ try:
+ Target, ToolChain, Arch, Tool, Attr = Key.split('_')
+ except:
+ continue
+ if ToolChain == TAB_STAR or Attr != TAB_TOD_DEFINES_FAMILY:
+ continue
+ try:
+ Family = dscobj.BuildOptions[(KeyFamily, Key, KeyCodeBase)]
+ Family = Family.strip().strip('=').strip()
+ except:
+ continue
+ if TAB_TOD_DEFINES_FAMILY not in self.ToolDef.ToolsDefTxtDatabase:
+ self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
+ if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
+ self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][ToolChain] = Family
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolDef.ToolsDefTxtDatabase:
+ self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
+ if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][ToolChain] = Family
+ if TAB_TOD_DEFINES_TOOL_CHAIN_TAG not in self.ToolDef.ToolsDefTxtDatabase:
+ self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = []
+ if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]:
+ self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].append(ToolChain)
+ GlobalData.gGlobalDefines = SavedGlobalDefines
+
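+    # Illustrative sketch: the [BuildOptions] keys consumed above follow the
+    # TARGET_TOOLCHAIN_ARCH_TOOL_ATTRIBUTE naming convention, e.g. (values
+    # hypothetical):
+    #
+    #   "DEBUG_GCC5_AARCH64_CC_FAMILY".split('_')
+    #     -> Target="DEBUG", ToolChain="GCC5", Arch="AARCH64", Tool="CC", Attr="FAMILY"
+    #
+    # Keys whose ToolChain is "*" or whose attribute is not FAMILY are skipped.
+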
+ ## Load configuration
+ #
+ # This method will parse target.txt and get the build configurations.
+ #
+ def LoadConfiguration(self):
+
+ # if no ARCH given in command line, get it from target.txt
+ if not self.ArchList:
+ self.ArchList = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET_ARCH]
+ self.ArchList = tuple(self.ArchList)
+
+ # if no build target given in command line, get it from target.txt
+ if not self.BuildTargetList:
+ self.BuildTargetList = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET]
+
+ # if no tool chain given in command line, get it from target.txt
+ if not self.ToolChainList:
+ self.ToolChainList = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
+ if self.ToolChainList is None or len(self.ToolChainList) == 0:
+ EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.\n")
+
+ if not self.PlatformFile:
+ PlatformFile = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_ACTIVE_PLATFORM]
+ if not PlatformFile:
+ # Try to find one in current directory
+ WorkingDirectory = os.getcwd()
+ FileList = glob.glob(os.path.normpath(os.path.join(WorkingDirectory, '*.dsc')))
+ FileNum = len(FileList)
+ if FileNum >= 2:
+ EdkLogger.error("build", OPTION_MISSING,
+ ExtraData="There are %d DSC files in %s. Use '-p' to specify one.\n" % (FileNum, WorkingDirectory))
+ elif FileNum == 1:
+ PlatformFile = FileList[0]
+ else:
+ EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
+ ExtraData="No active platform specified in target.txt or command line! Nothing can be built.\n")
+
+ self.PlatformFile = PathClass(NormFile(PlatformFile, self.WorkspaceDir), self.WorkspaceDir)
+
+ self.GetToolChainAndFamilyFromDsc (self.PlatformFile)
+
+ # check if the tool chains are defined or not
+ NewToolChainList = []
+ for ToolChain in self.ToolChainList:
+ if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]:
+ EdkLogger.warn("build", "Tool chain [%s] is not defined" % ToolChain)
+ else:
+ NewToolChainList.append(ToolChain)
+ # if no tool chain available, break the build
+ if len(NewToolChainList) == 0:
+ EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
+ ExtraData="[%s] not defined. No toolchain available for build!\n" % ", ".join(self.ToolChainList))
+ else:
+ self.ToolChainList = NewToolChainList
+
+ ToolChainFamily = []
+ ToolDefinition = self.ToolDef.ToolsDefTxtDatabase
+ for Tool in self.ToolChainList:
+ if TAB_TOD_DEFINES_FAMILY not in ToolDefinition or Tool not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool]:
+ EdkLogger.warn("build", "No tool chain family found in configuration for %s. Default to MSFT." % Tool)
+ ToolChainFamily.append(TAB_COMPILER_MSFT)
+ else:
+ ToolChainFamily.append(ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool])
+ self.ToolChainFamily = ToolChainFamily
+
+        self.ThreadNumber = ThreadNum()
+
+ ## Initialize build configuration
+ #
+ # This method will parse DSC file and merge the configurations from
+ # command line and target.txt, then get the final build configurations.
+ #
+ def InitBuild(self):
+ # parse target.txt, tools_def.txt, and platform file
+ self.LoadConfiguration()
+
+ # Allow case-insensitive for those from command line or configuration file
+ ErrorCode, ErrorInfo = self.PlatformFile.Validate(".dsc", False)
+ if ErrorCode != 0:
+ EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
+
+
+ def InitPreBuild(self):
+ self.LoadConfiguration()
+ ErrorCode, ErrorInfo = self.PlatformFile.Validate(".dsc", False)
+ if ErrorCode != 0:
+ EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
+ if self.BuildTargetList:
+ GlobalData.gGlobalDefines['TARGET'] = self.BuildTargetList[0]
+ if self.ArchList:
+ GlobalData.gGlobalDefines['ARCH'] = self.ArchList[0]
+ if self.ToolChainList:
+ GlobalData.gGlobalDefines['TOOLCHAIN'] = self.ToolChainList[0]
+ GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = self.ToolChainList[0]
+ if self.ToolChainFamily:
+ GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[0]
+ if 'PREBUILD' in GlobalData.gCommandLineDefines:
+ self.Prebuild = GlobalData.gCommandLineDefines.get('PREBUILD')
+ else:
+ self.Db_Flag = True
+ Platform = self.Db.MapPlatform(str(self.PlatformFile))
+ self.Prebuild = str(Platform.Prebuild)
+ if self.Prebuild:
+ PrebuildList = []
+ #
+ # Evaluate all arguments and convert arguments that are WORKSPACE
+ # relative paths to absolute paths. Filter arguments that look like
+ # flags or do not follow the file/dir naming rules to avoid false
+ # positives on this conversion.
+ #
+ for Arg in self.Prebuild.split():
+ #
+ # Do not modify Arg if it looks like a flag or an absolute file path
+ #
+ if Arg.startswith('-') or os.path.isabs(Arg):
+ PrebuildList.append(Arg)
+ continue
+ #
+ # Do not modify Arg if it does not look like a Workspace relative
+ # path that starts with a valid package directory name
+ #
+ if not Arg[0].isalpha() or os.path.dirname(Arg) == '':
+ PrebuildList.append(Arg)
+ continue
+ #
+ # If Arg looks like a WORKSPACE relative path, then convert to an
+ # absolute path and check to see if the file exists.
+ #
+ Temp = mws.join(self.WorkspaceDir, Arg)
+ if os.path.isfile(Temp):
+ Arg = Temp
+ PrebuildList.append(Arg)
+ self.Prebuild = ' '.join(PrebuildList)
+ self.Prebuild += self.PassCommandOption(self.BuildTargetList, self.ArchList, self.ToolChainList, self.PlatformFile, self.Target)
+
+ def InitPostBuild(self):
+ if 'POSTBUILD' in GlobalData.gCommandLineDefines:
+ self.Postbuild = GlobalData.gCommandLineDefines.get('POSTBUILD')
+ else:
+ Platform = self.Db.MapPlatform(str(self.PlatformFile))
+ self.Postbuild = str(Platform.Postbuild)
+ if self.Postbuild:
+ PostbuildList = []
+ #
+ # Evaluate all arguments and convert arguments that are WORKSPACE
+ # relative paths to absolute paths. Filter arguments that look like
+ # flags or do not follow the file/dir naming rules to avoid false
+ # positives on this conversion.
+ #
+ for Arg in self.Postbuild.split():
+ #
+ # Do not modify Arg if it looks like a flag or an absolute file path
+ #
+ if Arg.startswith('-') or os.path.isabs(Arg):
+ PostbuildList.append(Arg)
+ continue
+ #
+ # Do not modify Arg if it does not look like a Workspace relative
+ # path that starts with a valid package directory name
+ #
+ if not Arg[0].isalpha() or os.path.dirname(Arg) == '':
+ PostbuildList.append(Arg)
+ continue
+ #
+ # If Arg looks like a WORKSPACE relative path, then convert to an
+ # absolute path and check to see if the file exists.
+ #
+ Temp = mws.join(self.WorkspaceDir, Arg)
+ if os.path.isfile(Temp):
+ Arg = Temp
+ PostbuildList.append(Arg)
+ self.Postbuild = ' '.join(PostbuildList)
+ self.Postbuild += self.PassCommandOption(self.BuildTargetList, self.ArchList, self.ToolChainList, self.PlatformFile, self.Target)
+
+ def PassCommandOption(self, BuildTarget, TargetArch, ToolChain, PlatformFile, Target):
+ BuildStr = ''
+ if GlobalData.gCommand and isinstance(GlobalData.gCommand, list):
+ BuildStr += ' ' + ' '.join(GlobalData.gCommand)
+ TargetFlag = False
+ ArchFlag = False
+ ToolChainFlag = False
+ PlatformFileFlag = False
+
+ if GlobalData.gOptions and not GlobalData.gOptions.BuildTarget:
+ TargetFlag = True
+ if GlobalData.gOptions and not GlobalData.gOptions.TargetArch:
+ ArchFlag = True
+ if GlobalData.gOptions and not GlobalData.gOptions.ToolChain:
+ ToolChainFlag = True
+ if GlobalData.gOptions and not GlobalData.gOptions.PlatformFile:
+ PlatformFileFlag = True
+
+ if TargetFlag and BuildTarget:
+ if isinstance(BuildTarget, list) or isinstance(BuildTarget, tuple):
+ BuildStr += ' -b ' + ' -b '.join(BuildTarget)
+ elif isinstance(BuildTarget, str):
+ BuildStr += ' -b ' + BuildTarget
+ if ArchFlag and TargetArch:
+ if isinstance(TargetArch, list) or isinstance(TargetArch, tuple):
+ BuildStr += ' -a ' + ' -a '.join(TargetArch)
+ elif isinstance(TargetArch, str):
+ BuildStr += ' -a ' + TargetArch
+ if ToolChainFlag and ToolChain:
+ if isinstance(ToolChain, list) or isinstance(ToolChain, tuple):
+ BuildStr += ' -t ' + ' -t '.join(ToolChain)
+ elif isinstance(ToolChain, str):
+ BuildStr += ' -t ' + ToolChain
+ if PlatformFileFlag and PlatformFile:
+ if isinstance(PlatformFile, list) or isinstance(PlatformFile, tuple):
+ BuildStr += ' -p ' + ' -p '.join(PlatformFile)
+ elif isinstance(PlatformFile, str):
+                BuildStr += ' -p ' + PlatformFile
+ BuildStr += ' --conf=' + GlobalData.gConfDirectory
+ if Target:
+ BuildStr += ' ' + Target
+
+ return BuildStr
+
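+    # Illustrative sketch (hypothetical values): for options absent from the
+    # original command line, PassCommandOption() forwards the resolved settings
+    # (plus the Conf directory and build target) to the PREBUILD/POSTBUILD
+    # command, producing a suffix such as:
+    #
+    #   ' -b DEBUG -a X64 -t GCC5 -p Platform.dsc --conf=<ConfDirectory> all'
+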
+ def LaunchPrebuild(self):
+ if self.Prebuild:
+ EdkLogger.info("\n- Prebuild Start -\n")
+ self.LaunchPrebuildFlag = True
+ #
+            # The purpose of the .PrebuildEnv file is to capture environment variable
+            # settings made by the prebuild script and preserve them for the rest of
+            # the main build, because the child process environment evaporates as soon
+            # as the process exits and cannot be retrieved afterwards.
+ #
+ PrebuildEnvFile = os.path.join(GlobalData.gConfDirectory, '.cache', '.PrebuildEnv')
+ if os.path.isfile(PrebuildEnvFile):
+ os.remove(PrebuildEnvFile)
+ if os.path.isfile(self.PlatformBuildPath):
+ os.remove(self.PlatformBuildPath)
+ if sys.platform == "win32":
+ args = ' && '.join((self.Prebuild, 'set > ' + PrebuildEnvFile))
+ Process = Popen(args, stdout=PIPE, stderr=PIPE, shell=True)
+ else:
+ args = ' && '.join((self.Prebuild, 'env > ' + PrebuildEnvFile))
+ Process = Popen(args, stdout=PIPE, stderr=PIPE, shell=True)
+
+ # launch two threads to read the STDOUT and STDERR
+ EndOfProcedure = Event()
+ EndOfProcedure.clear()
+ if Process.stdout:
+ StdOutThread = Thread(target=ReadMessage, args=(Process.stdout, EdkLogger.info, EndOfProcedure))
+ StdOutThread.setName("STDOUT-Redirector")
+ StdOutThread.setDaemon(False)
+ StdOutThread.start()
+
+ if Process.stderr:
+ StdErrThread = Thread(target=ReadMessage, args=(Process.stderr, EdkLogger.quiet, EndOfProcedure))
+ StdErrThread.setName("STDERR-Redirector")
+ StdErrThread.setDaemon(False)
+ StdErrThread.start()
+ # waiting for program exit
+ Process.wait()
+
+ if Process.stdout:
+ StdOutThread.join()
+ if Process.stderr:
+ StdErrThread.join()
+ if Process.returncode != 0 :
+                EdkLogger.error("Prebuild", PREBUILD_ERROR, 'Prebuild process did not succeed!')
+
+ if os.path.exists(PrebuildEnvFile):
+ f = open(PrebuildEnvFile)
+ envs = f.readlines()
+ f.close()
+ envs = [l.split("=", 1) for l in envs ]
+ envs = [[I.strip() for I in item] for item in envs if len(item) == 2]
+ os.environ.update(dict(envs))
+ EdkLogger.info("\n- Prebuild Done -\n")
+
+ def LaunchPostbuild(self):
+ if self.Postbuild:
+ EdkLogger.info("\n- Postbuild Start -\n")
+            Process = Popen(self.Postbuild, stdout=PIPE, stderr=PIPE, shell=True)
+ # launch two threads to read the STDOUT and STDERR
+ EndOfProcedure = Event()
+ EndOfProcedure.clear()
+ if Process.stdout:
+ StdOutThread = Thread(target=ReadMessage, args=(Process.stdout, EdkLogger.info, EndOfProcedure))
+ StdOutThread.setName("STDOUT-Redirector")
+ StdOutThread.setDaemon(False)
+ StdOutThread.start()
+
+ if Process.stderr:
+ StdErrThread = Thread(target=ReadMessage, args=(Process.stderr, EdkLogger.quiet, EndOfProcedure))
+ StdErrThread.setName("STDERR-Redirector")
+ StdErrThread.setDaemon(False)
+ StdErrThread.start()
+ # waiting for program exit
+ Process.wait()
+
+ if Process.stdout:
+ StdOutThread.join()
+ if Process.stderr:
+ StdErrThread.join()
+ if Process.returncode != 0 :
+                EdkLogger.error("Postbuild", POSTBUILD_ERROR, 'Postbuild process did not succeed!')
+ EdkLogger.info("\n- Postbuild Done -\n")
+
+ ## Build a module or platform
+ #
+    # Create autogen code and makefile for a module or platform, and then launch
+    # the "make" command to build it
+ #
+ # @param Target The target of build command
+ # @param Platform The platform file
+ # @param Module The module file
+ # @param BuildTarget The name of build target, one of "DEBUG", "RELEASE"
+ # @param ToolChain The name of toolchain to build
+ # @param Arch The arch of the module/platform
+ # @param CreateDepModuleCodeFile Flag used to indicate creating code
+ # for dependent modules/Libraries
+ # @param CreateDepModuleMakeFile Flag used to indicate creating makefile
+ # for dependent modules/Libraries
+ #
+ def _BuildPa(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False, FfsCommand=None, PcdMaList=None):
+ if AutoGenObject is None:
+ return False
+ if FfsCommand is None:
+ FfsCommand = {}
+ # skip file generation for cleanxxx targets, run and fds target
+ if Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
+ # for target which must generate AutoGen code and makefile
+ mqueue = mp.Queue()
+ for m in AutoGenObject.GetAllModuleInfo:
+ mqueue.put(m)
+ mqueue.put((None,None,None,None,None,None,None))
+ AutoGenObject.DataPipe.DataContainer = {"CommandTarget": self.Target}
+ AutoGenObject.DataPipe.DataContainer = {"Workspace_timestamp": AutoGenObject.Workspace._SrcTimeStamp}
+ AutoGenObject.CreateLibModuelDirs()
+ AutoGenObject.DataPipe.DataContainer = {"LibraryBuildDirectoryList":AutoGenObject.LibraryBuildDirectoryList}
+ AutoGenObject.DataPipe.DataContainer = {"ModuleBuildDirectoryList":AutoGenObject.ModuleBuildDirectoryList}
+ AutoGenObject.DataPipe.DataContainer = {"FdsCommandDict": AutoGenObject.Workspace.GenFdsCommandDict}
+ self.Progress.Start("Generating makefile and code")
+ data_pipe_file = os.path.join(AutoGenObject.BuildDir, "GlobalVar_%s_%s.bin" % (str(AutoGenObject.Guid),AutoGenObject.Arch))
+ AutoGenObject.DataPipe.dump(data_pipe_file)
+ cqueue = mp.Queue()
+ autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)
+ AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
+ with open(AutoGenIdFile,"w") as fw:
+ fw.write("Arch=%s\n" % "|".join((AutoGenObject.Workspace.ArchList)))
+ fw.write("BuildDir=%s\n" % AutoGenObject.Workspace.BuildDir)
+ fw.write("PlatformGuid=%s\n" % str(AutoGenObject.Guid))
+ self.Progress.Stop("done!")
+ if not autogen_rt:
+ self.AutoGenMgr.TerminateWorkers()
+ self.AutoGenMgr.join(1)
+ raise FatalError(errorcode)
+ AutoGenObject.CreateCodeFile(False)
+ AutoGenObject.CreateMakeFile(False)
+ else:
+ # always recreate top/platform makefile when clean, just in case of inconsistency
+ AutoGenObject.CreateCodeFile(True)
+ AutoGenObject.CreateMakeFile(True)
+
+ if EdkLogger.GetLevel() == EdkLogger.QUIET:
+ EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))
+
+ BuildCommand = AutoGenObject.BuildCommand
+ if BuildCommand is None or len(BuildCommand) == 0:
+ EdkLogger.error("build", OPTION_MISSING,
+ "No build command found for this module. "
+ "Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
+ (AutoGenObject.BuildTarget, AutoGenObject.ToolChain, AutoGenObject.Arch),
+ ExtraData=str(AutoGenObject))
+
+ # run
+ if Target == 'run':
+ return True
+
+ # build modules
+ if BuildModule:
+ BuildCommand = BuildCommand + [Target]
+ LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)
+ if GlobalData.gBinCacheDest:
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
+ self.BuildModules = []
+ return True
+
+ # build library
+ if Target == 'libraries':
+ DirList = []
+ for Lib in AutoGenObject.LibraryAutoGenList:
+ if not Lib.IsBinaryModule:
+ DirList.append((os.path.join(AutoGenObject.BuildDir, Lib.BuildDir),Lib))
+ for Lib, LibAutoGen in DirList:
+ NewBuildCommand = BuildCommand + ['-f', os.path.normpath(os.path.join(Lib, self.MakeFileName)), 'pbuild']
+ LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,LibAutoGen)
+ return True
+
+ # build module
+ if Target == 'modules':
+ DirList = []
+ for Lib in AutoGenObject.LibraryAutoGenList:
+ if not Lib.IsBinaryModule:
+ DirList.append((os.path.join(AutoGenObject.BuildDir, Lib.BuildDir),Lib))
+ for Lib, LibAutoGen in DirList:
+ NewBuildCommand = BuildCommand + ['-f', os.path.normpath(os.path.join(Lib, self.MakeFileName)), 'pbuild']
+ LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,LibAutoGen)
+
+ DirList = []
+ for ModuleAutoGen in AutoGenObject.ModuleAutoGenList:
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append((os.path.join(AutoGenObject.BuildDir, ModuleAutoGen.BuildDir),ModuleAutoGen))
+ for Mod,ModAutoGen in DirList:
+ NewBuildCommand = BuildCommand + ['-f', os.path.normpath(os.path.join(Mod, self.MakeFileName)), 'pbuild']
+ LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,ModAutoGen)
+ self.CreateAsBuiltInf()
+ if GlobalData.gBinCacheDest:
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
+ self.BuildModules = []
+ return True
+
+ # cleanlib
+ if Target == 'cleanlib':
+ for Lib in AutoGenObject.LibraryBuildDirectoryList:
+ LibMakefile = os.path.normpath(os.path.join(Lib, self.MakeFileName))
+ if os.path.exists(LibMakefile):
+ NewBuildCommand = BuildCommand + ['-f', LibMakefile, 'cleanall']
+ LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir)
+ return True
+
+ # clean
+ if Target == 'clean':
+ for Mod in AutoGenObject.ModuleBuildDirectoryList:
+ ModMakefile = os.path.normpath(os.path.join(Mod, self.MakeFileName))
+ if os.path.exists(ModMakefile):
+ NewBuildCommand = BuildCommand + ['-f', ModMakefile, 'cleanall']
+ LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir)
+ for Lib in AutoGenObject.LibraryBuildDirectoryList:
+ LibMakefile = os.path.normpath(os.path.join(Lib, self.MakeFileName))
+ if os.path.exists(LibMakefile):
+ NewBuildCommand = BuildCommand + ['-f', LibMakefile, 'cleanall']
+ LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir)
+ return True
+
+ # cleanall
+ if Target == 'cleanall':
+ try:
+ #os.rmdir(AutoGenObject.BuildDir)
+ RemoveDirectory(AutoGenObject.BuildDir, True)
+ except WindowsError as X:
+ EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X))
+ return True
+
+ ## Build a module or platform
+ #
+    # Create autogen code and makefile for a module or platform, and then launch
+    # the "make" command to build it
+ #
+ # @param Target The target of build command
+ # @param Platform The platform file
+ # @param Module The module file
+ # @param BuildTarget The name of build target, one of "DEBUG", "RELEASE"
+ # @param ToolChain The name of toolchain to build
+ # @param Arch The arch of the module/platform
+ # @param CreateDepModuleCodeFile Flag used to indicate creating code
+ # for dependent modules/Libraries
+ # @param CreateDepModuleMakeFile Flag used to indicate creating makefile
+ # for dependent modules/Libraries
+ #
+ def _Build(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False):
+ if AutoGenObject is None:
+ return False
+
+ # skip file generation for cleanxxx targets, run and fds target
+ if Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
+ # for target which must generate AutoGen code and makefile
+ if not self.SkipAutoGen or Target == 'genc':
+ self.Progress.Start("Generating code")
+ AutoGenObject.CreateCodeFile(CreateDepsCodeFile)
+ self.Progress.Stop("done!")
+ if Target == "genc":
+ return True
+
+ if not self.SkipAutoGen or Target == 'genmake':
+ self.Progress.Start("Generating makefile")
+ AutoGenObject.CreateMakeFile(CreateDepsMakeFile)
+ #AutoGenObject.CreateAsBuiltInf()
+ self.Progress.Stop("done!")
+ if Target == "genmake":
+ return True
+ else:
+ # always recreate top/platform makefile when clean, just in case of inconsistency
+ AutoGenObject.CreateCodeFile(True)
+ AutoGenObject.CreateMakeFile(True)
+
+ if EdkLogger.GetLevel() == EdkLogger.QUIET:
+ EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))
+
+ BuildCommand = AutoGenObject.BuildCommand
+ if BuildCommand is None or len(BuildCommand) == 0:
+ EdkLogger.error("build", OPTION_MISSING,
+ "No build command found for this module. "
+ "Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
+ (AutoGenObject.BuildTarget, AutoGenObject.ToolChain, AutoGenObject.Arch),
+ ExtraData=str(AutoGenObject))
+
+ # build modules
+ if BuildModule:
+ if Target != 'fds':
+ BuildCommand = BuildCommand + [Target]
+ AutoGenObject.BuildTime = LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)
+ self.CreateAsBuiltInf()
+ if GlobalData.gBinCacheDest:
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
+ self.BuildModules = []
+ return True
+
+ # genfds
+ if Target == 'fds':
+ if GenFdsApi(AutoGenObject.GenFdsCommandDict, self.Db):
+ EdkLogger.error("build", COMMAND_FAILURE)
+ Threshold = self.GetFreeSizeThreshold()
+ if Threshold:
+ self.CheckFreeSizeThreshold(Threshold, AutoGenObject.FvDir)
+ return True
+
+ # run
+ if Target == 'run':
+ return True
+
+ # build library
+ if Target == 'libraries':
+ pass
+
+ # not build modules
+
+
+ # cleanall
+ if Target == 'cleanall':
+ try:
+ #os.rmdir(AutoGenObject.BuildDir)
+ RemoveDirectory(AutoGenObject.BuildDir, True)
+ except WindowsError as X:
+ EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X))
+ return True
+
+ ## Rebase module image and Get function address for the input module list.
+ #
+ def _RebaseModule (self, MapBuffer, BaseAddress, ModuleList, AddrIsOffset = True, ModeIsSmm = False):
+ if ModeIsSmm:
+ AddrIsOffset = False
+ for InfFile in ModuleList:
+ sys.stdout.write (".")
+ sys.stdout.flush()
+ ModuleInfo = ModuleList[InfFile]
+ ModuleName = ModuleInfo.BaseName
+ ModuleOutputImage = ModuleInfo.Image.FileName
+ ModuleDebugImage = os.path.join(ModuleInfo.DebugDir, ModuleInfo.BaseName + '.efi')
+            # for SMM modules in SMRAM, the SMRAM is allocated from base to top.
+ if not ModeIsSmm:
+ BaseAddress = BaseAddress - ModuleInfo.Image.Size
+ #
+ # Update Image to new BaseAddress by GenFw tool
+ #
+ LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
+ LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
+ else:
+ #
+ # Set new address to the section header only for SMM driver.
+ #
+ LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
+ LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
+ #
+ # Collect function address from Map file
+ #
+ ImageMapTable = ModuleOutputImage.replace('.efi', '.map')
+ FunctionList = []
+ if os.path.exists(ImageMapTable):
+ OrigImageBaseAddress = 0
+ ImageMap = open(ImageMapTable, 'r')
+ for LinStr in ImageMap:
+ if len (LinStr.strip()) == 0:
+ continue
+ #
+ # Get the preferred address set on link time.
+ #
+ if LinStr.find ('Preferred load address is') != -1:
+ StrList = LinStr.split()
+ OrigImageBaseAddress = int (StrList[len(StrList) - 1], 16)
+
+ StrList = LinStr.split()
+ if len (StrList) > 4:
+ if StrList[3] == 'f' or StrList[3] == 'F':
+ Name = StrList[1]
+ RelativeAddress = int (StrList[2], 16) - OrigImageBaseAddress
+ FunctionList.append ((Name, RelativeAddress))
+
+ ImageMap.close()
+ #
+ # Add general information.
+ #
+ if ModeIsSmm:
+ MapBuffer.append('\n\n%s (Fixed SMRAM Offset, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))
+ elif AddrIsOffset:
+ MapBuffer.append('\n\n%s (Fixed Memory Offset, BaseAddress=-0x%010X, EntryPoint=-0x%010X)\n' % (ModuleName, 0 - BaseAddress, 0 - (BaseAddress + ModuleInfo.Image.EntryPoint)))
+ else:
+ MapBuffer.append('\n\n%s (Fixed Memory Address, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))
+ #
+            # Add GUID and general section information.
+ #
+ TextSectionAddress = 0
+ DataSectionAddress = 0
+ for SectionHeader in ModuleInfo.Image.SectionHeaderList:
+ if SectionHeader[0] == '.text':
+ TextSectionAddress = SectionHeader[1]
+ elif SectionHeader[0] in ['.data', '.sdata']:
+ DataSectionAddress = SectionHeader[1]
+ if AddrIsOffset:
+ MapBuffer.append('(GUID=%s, .textbaseaddress=-0x%010X, .databaseaddress=-0x%010X)\n' % (ModuleInfo.Guid, 0 - (BaseAddress + TextSectionAddress), 0 - (BaseAddress + DataSectionAddress)))
+ else:
+ MapBuffer.append('(GUID=%s, .textbaseaddress=0x%010X, .databaseaddress=0x%010X)\n' % (ModuleInfo.Guid, BaseAddress + TextSectionAddress, BaseAddress + DataSectionAddress))
+ #
+ # Add debug image full path.
+ #
+ MapBuffer.append('(IMAGE=%s)\n\n' % (ModuleDebugImage))
+ #
+ # Add function address
+ #
+ for Function in FunctionList:
+ if AddrIsOffset:
+ MapBuffer.append(' -0x%010X %s\n' % (0 - (BaseAddress + Function[1]), Function[0]))
+ else:
+ MapBuffer.append(' 0x%010X %s\n' % (BaseAddress + Function[1], Function[0]))
+ ImageMap.close()
+
+ #
+ # for SMM module in SMRAM, the SMRAM will be allocated from base to top.
+ #
+ if ModeIsSmm:
+ BaseAddress = BaseAddress + ModuleInfo.Image.Size
+
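+    # Illustrative sketch (hypothetical map content): the parser above expects
+    # linker map lines shaped like the following, where the fourth column "f"
+    # marks a function symbol:
+    #
+    #   Preferred load address is 10000000
+    #   0001:000002c0   _ModuleEntryPoint   100002c0 f   Module.obj
+    #
+    # The function's relative address is the third column's hex value minus
+    # the preferred load address parsed earlier.
+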
+ ## Collect MAP information of all FVs
+ #
+ def _CollectFvMapBuffer (self, MapBuffer, Wa, ModuleList):
+ if self.Fdf:
+ # First get the XIP base address for FV map file.
+ GuidPattern = re.compile("[-a-fA-F0-9]+")
+ GuidName = re.compile(r"\(GUID=[-a-fA-F0-9]+")
+ for FvName in Wa.FdfProfile.FvDict:
+ FvMapBuffer = os.path.join(Wa.FvDir, FvName + '.Fv.map')
+ if not os.path.exists(FvMapBuffer):
+ continue
+ FvMap = open(FvMapBuffer, 'r')
+ #skip FV size information
+ FvMap.readline()
+ FvMap.readline()
+ FvMap.readline()
+ FvMap.readline()
+ for Line in FvMap:
+ MatchGuid = GuidPattern.match(Line)
+ if MatchGuid is not None:
+ #
+ # Replace GUID with module name
+ #
+ GuidString = MatchGuid.group()
+ if GuidString.upper() in ModuleList:
+ Line = Line.replace(GuidString, ModuleList[GuidString.upper()].Name)
+ MapBuffer.append(Line)
+ #
+ # Add the debug image full path.
+ #
+ MatchGuid = GuidName.match(Line)
+ if MatchGuid is not None:
+ GuidString = MatchGuid.group().split("=")[1]
+ if GuidString.upper() in ModuleList:
+ MapBuffer.append('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))
+
+ FvMap.close()
+
+ ## Collect MAP information of all modules
+ #
+ def _CollectModuleMapBuffer (self, MapBuffer, ModuleList):
+ sys.stdout.write ("Generate Load Module At Fix Address Map")
+ sys.stdout.flush()
+ PatchEfiImageList = []
+ PeiModuleList = {}
+ BtModuleList = {}
+ RtModuleList = {}
+ SmmModuleList = {}
+ PeiSize = 0
+ BtSize = 0
+ RtSize = 0
+ # Reserve 4K in SMRAM so that SMM module addresses do not start at 0.
+ SmmSize = 0x1000
+ for ModuleGuid in ModuleList:
+ Module = ModuleList[ModuleGuid]
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (Module.MetaFile, Module.Arch, Module.ToolChain, Module.BuildTarget)
+
+ OutputImageFile = ''
+ for ResultFile in Module.CodaTargetList:
+ if str(ResultFile.Target).endswith('.efi'):
+ #
+ # module list for PEI, DXE, RUNTIME and SMM
+ #
+ OutputImageFile = os.path.join(Module.OutputDir, Module.Name + '.efi')
+ ImageClass = PeImageClass (OutputImageFile)
+ if not ImageClass.IsValid:
+ EdkLogger.error("build", FILE_PARSE_FAILURE, ExtraData=ImageClass.ErrorInfo)
+ ImageInfo = PeImageInfo(Module.Name, Module.Guid, Module.Arch, Module.OutputDir, Module.DebugDir, ImageClass)
+ if Module.ModuleType in [SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER, EDK_COMPONENT_TYPE_PIC_PEIM, EDK_COMPONENT_TYPE_RELOCATABLE_PEIM, SUP_MODULE_DXE_CORE]:
+ PeiModuleList[Module.MetaFile] = ImageInfo
+ PeiSize += ImageInfo.Image.Size
+ elif Module.ModuleType in [EDK_COMPONENT_TYPE_BS_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_UEFI_DRIVER]:
+ BtModuleList[Module.MetaFile] = ImageInfo
+ BtSize += ImageInfo.Image.Size
+ elif Module.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER, EDK_COMPONENT_TYPE_RT_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, EDK_COMPONENT_TYPE_SAL_RT_DRIVER]:
+ RtModuleList[Module.MetaFile] = ImageInfo
+ RtSize += ImageInfo.Image.Size
+ elif Module.ModuleType in [SUP_MODULE_SMM_CORE, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ SmmModuleList[Module.MetaFile] = ImageInfo
+ SmmSize += ImageInfo.Image.Size
+ if Module.ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
+ PiSpecVersion = Module.Module.Specification.get('PI_SPECIFICATION_VERSION', '0x00000000')
+ # for PI specification < PI1.1, DXE_SMM_DRIVER also runs as BOOT time driver.
+ if int(PiSpecVersion, 16) < 0x0001000A:
+ BtModuleList[Module.MetaFile] = ImageInfo
+ BtSize += ImageInfo.Image.Size
+ break
+ #
+ # EFI image is final target.
+ # Check EFI image contains patchable FixAddress related PCDs.
+ #
+ if OutputImageFile != '':
+ ModuleIsPatch = False
+ for Pcd in Module.ModulePcdList:
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE and Pcd.TokenCName in TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SET:
+ ModuleIsPatch = True
+ break
+ if not ModuleIsPatch:
+ for Pcd in Module.LibraryPcdList:
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE and Pcd.TokenCName in TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SET:
+ ModuleIsPatch = True
+ break
+
+ if not ModuleIsPatch:
+ continue
+ #
+ # Module includes the patchable load fix address PCDs.
+ # It will be fixed up later.
+ #
+ PatchEfiImageList.append (OutputImageFile)
+
+ #
+ # Get Top Memory address
+ #
+ ReservedRuntimeMemorySize = 0
+ TopMemoryAddress = 0
+ if self.LoadFixAddress == 0xFFFFFFFFFFFFFFFF:
+ TopMemoryAddress = 0
+ else:
+ TopMemoryAddress = self.LoadFixAddress
+ if TopMemoryAddress < RtSize + BtSize + PeiSize:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is too low to load driver")
+
+ #
+ # Patch FixAddress related PCDs into EFI image
+ #
+ for EfiImage in PatchEfiImageList:
+ EfiImageMap = EfiImage.replace('.efi', '.map')
+ if not os.path.exists(EfiImageMap):
+ continue
+ #
+ # Get PCD offset in EFI image by GenPatchPcdTable function
+ #
+ PcdTable = parsePcdInfoFromMapFile(EfiImageMap, EfiImage)
+ #
+ # Patch real PCD value by PatchPcdValue tool
+ #
+ for PcdInfo in PcdTable:
+ ReturnValue = 0
+ if PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE:
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize // 0x1000))
+ elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE:
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize // 0x1000))
+ elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE:
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize // 0x1000))
+ elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE and len (SmmModuleList) > 0:
+ ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize // 0x1000))
+ if ReturnValue != 0:
+ EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)
+
+ MapBuffer.append('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize // 0x1000))
+ MapBuffer.append('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize // 0x1000))
+ MapBuffer.append('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize // 0x1000))
+ if len (SmmModuleList) > 0:
+ MapBuffer.append('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize // 0x1000))
+
+ PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
+ BtBaseAddr = TopMemoryAddress - RtSize
+ RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
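+ # Worked example (comment only): with TopMemoryAddress = 0x80000000,
+ # RtSize = 0x40000 and BtSize = 0x100000, the top-down layout gives
+ # RtBaseAddr = 0x80000000, BtBaseAddr = 0x7FFC0000 and
+ # PeiBaseAddr = 0x7FEC0000 (ReservedRuntimeMemorySize is 0 here).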
+
+ self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
+ self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
+ self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)
+ self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)
+ MapBuffer.append('\n\n')
+ sys.stdout.write ("\n")
+ sys.stdout.flush()
+
+ ## Save platform Map file
+ #
+ def _SaveMapFile (self, MapBuffer, Wa):
+ #
+ # Compose the map file path.
+ #
+ MapFilePath = os.path.join(Wa.BuildDir, Wa.Name + '.map')
+ #
+ # Save address map into MAP file.
+ #
+ SaveFileOnChange(MapFilePath, ''.join(MapBuffer), False)
+ if self.LoadFixAddress != 0:
+ sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))
+ sys.stdout.flush()
+
+ ## Build active platform for different build targets and different tool chains
+ #
+ def _BuildPlatform(self):
+ SaveFileOnChange(self.PlatformBuildPath, '# DO NOT EDIT \n# FILE auto-generated\n', False)
+ for BuildTarget in self.BuildTargetList:
+ GlobalData.gGlobalDefines['TARGET'] = BuildTarget
+ index = 0
+ for ToolChain in self.ToolChainList:
+ GlobalData.gGlobalDefines['TOOLCHAIN'] = ToolChain
+ GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = ToolChain
+ GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[index]
+ index += 1
+ Wa = WorkspaceAutoGen(
+ self.WorkspaceDir,
+ self.PlatformFile,
+ BuildTarget,
+ ToolChain,
+ self.ArchList,
+ self.BuildDatabase,
+ self.TargetTxt,
+ self.ToolDef,
+ self.Fdf,
+ self.FdList,
+ self.FvList,
+ self.CapList,
+ self.SkuId,
+ self.UniFlag,
+ self.Progress
+ )
+ self.Fdf = Wa.FdfFile
+ self.LoadFixAddress = Wa.Platform.LoadFixAddress
+ self.BuildReport.AddPlatformReport(Wa)
+ self.Progress.Stop("done!")
+
+ # Add ffs build to makefile
+ CmdListDict = {}
+ if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
+ CmdListDict = self._GenFfsCmd(Wa.ArchList)
+
+ for Arch in Wa.ArchList:
+ PcdMaList = []
+ GlobalData.gGlobalDefines['ARCH'] = Arch
+ Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
+ for Module in Pa.Platform.Modules:
+ # Get ModuleAutoGen object to generate C code file and makefile
+ Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ if Ma is None:
+ continue
+ if Ma.PcdIsDriver:
+ Ma.PlatformInfo = Pa
+ Ma.Workspace = Wa
+ PcdMaList.append(Ma)
+ self.BuildModules.append(Ma)
+ Pa.DataPipe.DataContainer = {"FfsCommand":CmdListDict}
+ Pa.DataPipe.DataContainer = {"Workspace_timestamp": Wa._SrcTimeStamp}
+ self._BuildPa(self.Target, Pa, FfsCommand=CmdListDict,PcdMaList=PcdMaList)
+
+ # Create MAP file when Load Fix Address is enabled.
+ if self.Target in ["", "all", "fds"]:
+ for Arch in Wa.ArchList:
+ GlobalData.gGlobalDefines['ARCH'] = Arch
+ #
+ # Check whether the set fix address is above 4G for 32bit image.
+ #
+ if (Arch == 'IA32' or Arch == 'ARM') and self.LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self.LoadFixAddress >= 0x100000000:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS can't be set to larger than or equal to 4G for the platform with IA32 or ARM arch modules")
+ #
+ # Get Module List
+ #
+ ModuleList = {}
+ for Pa in Wa.AutoGenObjectList:
+ for Ma in Pa.ModuleAutoGenList:
+ if Ma is None:
+ continue
+ if not Ma.IsLibrary:
+ ModuleList[Ma.Guid.upper()] = Ma
+
+ MapBuffer = []
+ if self.LoadFixAddress != 0:
+ #
+ # Rebase module to the preferred memory address before GenFds
+ #
+ self._CollectModuleMapBuffer(MapBuffer, ModuleList)
+ if self.Fdf:
+ #
+ # create FDS again for the updated EFI image
+ #
+ self._Build("fds", Wa)
+ #
+ # Create MAP file for all platform FVs after GenFds.
+ #
+ self._CollectFvMapBuffer(MapBuffer, Wa, ModuleList)
+ #
+ # Save MAP buffer into MAP file.
+ #
+ self._SaveMapFile (MapBuffer, Wa)
+ self.CreateGuidedSectionToolsFile(Wa)
+
+ ## Build active module for different build targets, different tool chains and different archs
+ #
+ def _BuildModule(self):
+ for BuildTarget in self.BuildTargetList:
+ GlobalData.gGlobalDefines['TARGET'] = BuildTarget
+ index = 0
+ for ToolChain in self.ToolChainList:
+ WorkspaceAutoGenTime = time.time()
+ GlobalData.gGlobalDefines['TOOLCHAIN'] = ToolChain
+ GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = ToolChain
+ GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[index]
+ index += 1
+ #
+ # module build needs platform build information, so get platform
+ # AutoGen first
+ #
+ Wa = WorkspaceAutoGen(
+ self.WorkspaceDir,
+ self.PlatformFile,
+ BuildTarget,
+ ToolChain,
+ self.ArchList,
+ self.BuildDatabase,
+ self.TargetTxt,
+ self.ToolDef,
+ self.Fdf,
+ self.FdList,
+ self.FvList,
+ self.CapList,
+ self.SkuId,
+ self.UniFlag,
+ self.Progress,
+ self.ModuleFile
+ )
+ self.Fdf = Wa.FdfFile
+ self.LoadFixAddress = Wa.Platform.LoadFixAddress
+ Wa.CreateMakeFile(False)
+ # Add ffs build to makefile
+ CmdListDict = None
+ if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
+ CmdListDict = self._GenFfsCmd(Wa.ArchList)
+
+ GlobalData.file_lock = mp.Lock()
+ GlobalData.FfsCmd = CmdListDict
+
+ self.Progress.Stop("done!")
+ MaList = []
+ ExitFlag = threading.Event()
+ ExitFlag.clear()
+ self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))
+ for Arch in Wa.ArchList:
+ AutoGenStart = time.time()
+ GlobalData.gGlobalDefines['ARCH'] = Arch
+ Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
+ for Module in Pa.Platform.Modules:
+ if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:
+ Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ if Ma is None:
+ continue
+ if Ma.PcdIsDriver:
+ Ma.PlatformInfo = Pa
+ Ma.Workspace = Wa
+ MaList.append(Ma)
+
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:
+ if Ma.CanSkipbyPreMakeCache():
+ continue
+ else:
+ self.PreMakeCacheMiss.add(Ma)
+
+ # Do not run AutoGen for the targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds'
+ if self.Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
+ # For targets that must generate AutoGen code and a makefile
+ if not self.SkipAutoGen or self.Target == 'genc':
+ self.Progress.Start("Generating code")
+ Ma.CreateCodeFile(True)
+ self.Progress.Stop("done!")
+ if self.Target == "genc":
+ return True
+ if not self.SkipAutoGen or self.Target == 'genmake':
+ self.Progress.Start("Generating makefile")
+ if CmdListDict and self.Fdf and (Module.Path, Arch) in CmdListDict:
+ Ma.CreateMakeFile(True, CmdListDict[Module.Path, Arch])
+ del CmdListDict[Module.Path, Arch]
+ else:
+ Ma.CreateMakeFile(True)
+ self.Progress.Stop("done!")
+ if self.Target == "genmake":
+ return True
+
+ if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
+ if Ma.CanSkipbyMakeCache():
+ continue
+ else:
+ self.MakeCacheMiss.add(Ma)
+
+ self.BuildModules.append(Ma)
+ self.AutoGenTime += int(round((time.time() - AutoGenStart)))
+ MakeStart = time.time()
+ for Ma in self.BuildModules:
+ if not Ma.IsBinaryModule:
+ Bt = BuildTask.New(ModuleMakeUnit(Ma, Pa.BuildCommand,self.Target))
+ # Break build if any build thread has error
+ if BuildTask.HasError():
+ # we need a full version of makefile for platform
+ ExitFlag.set()
+ BuildTask.WaitForComplete()
+ Pa.CreateMakeFile(False)
+ EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
+ # Start task scheduler
+ if not BuildTask.IsOnGoing():
+ BuildTask.StartScheduler(self.ThreadNumber, ExitFlag)
+
+ # In case there's an interruption, we need a full version of the makefile for the platform
+ Pa.CreateMakeFile(False)
+ if BuildTask.HasError():
+ EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
+ self.MakeTime += int(round((time.time() - MakeStart)))
+
+ MakeContinue = time.time()
+ ExitFlag.set()
+ BuildTask.WaitForComplete()
+ self.CreateAsBuiltInf()
+ if GlobalData.gBinCacheDest:
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
+ self.BuildModules = []
+ self.MakeTime += int(round((time.time() - MakeContinue)))
+ if BuildTask.HasError():
+ EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
+
+ self.BuildReport.AddPlatformReport(Wa, MaList)
+ if MaList == []:
+ EdkLogger.error(
+ 'build',
+ BUILD_ERROR,
+ "Module for [%s] is not a component of active platform."\
+ " Please make sure that the ARCH and inf file path are"\
+ " given in the same as in [%s]" % \
+ (', '.join(Wa.ArchList), self.PlatformFile),
+ ExtraData=self.ModuleFile
+ )
+ # Create MAP file when Load Fix Address is enabled.
+ if self.Target == "fds" and self.Fdf:
+ for Arch in Wa.ArchList:
+ #
+ # Check whether the set fix address is above 4G for 32bit image.
+ #
+ if (Arch == 'IA32' or Arch == 'ARM') and self.LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self.LoadFixAddress >= 0x100000000:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS can't be set to larger than or equal to 4G for the platorm with IA32 or ARM arch modules")
+ #
+ # Get Module List
+ #
+ ModuleList = {}
+ for Pa in Wa.AutoGenObjectList:
+ for Ma in Pa.ModuleAutoGenList:
+ if Ma is None:
+ continue
+ if not Ma.IsLibrary:
+ ModuleList[Ma.Guid.upper()] = Ma
+
+ MapBuffer = []
+ if self.LoadFixAddress != 0:
+ #
+ # Rebase module to the preferred memory address before GenFds
+ #
+ self._CollectModuleMapBuffer(MapBuffer, ModuleList)
+ #
+ # create FDS again for the updated EFI image
+ #
+ GenFdsStart = time.time()
+ self._Build("fds", Wa)
+ self.GenFdsTime += int(round((time.time() - GenFdsStart)))
+ #
+ # Create MAP file for all platform FVs after GenFds.
+ #
+ self._CollectFvMapBuffer(MapBuffer, Wa, ModuleList)
+ #
+ # Save MAP buffer into MAP file.
+ #
+ self._SaveMapFile (MapBuffer, Wa)
+
+ def _GenFfsCmd(self,ArchList):
+ # Convert a dictionary of Cmd:(Inf, Arch) pairs
+ # into a dictionary of (Inf, Arch):{Cmd, ...} sets
+ CmdSetDict = defaultdict(set)
+ GenFfsDict = GenFds.GenFfsMakefile('', GlobalData.gFdfParser, self, ArchList, GlobalData)
+ for Cmd in GenFfsDict:
+ tmpInf, tmpArch = GenFfsDict[Cmd]
+ CmdSetDict[tmpInf, tmpArch].add(Cmd)
+ return CmdSetDict
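+
+ # Example of the inversion above (comment only): a GenFfsDict such as
+ #   {'cmd1': ('a.inf', 'X64'), 'cmd2': ('a.inf', 'X64')}
+ # becomes {('a.inf', 'X64'): {'cmd1', 'cmd2'}}, so all FFS commands for a
+ # given (Inf, Arch) pair can be emitted into that module's makefile at once.
+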
+ def VerifyAutoGenFiles(self):
+ AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
+ try:
+ with open(AutoGenIdFile) as fd:
+ lines = fd.readlines()
+ except:
+ return None
+ for line in lines:
+ if "Arch" in line:
+ ArchList = line.strip().split("=")[1].split("|")
+ if "BuildDir" in line:
+ BuildDir = line.split("=")[1].strip()
+ if "PlatformGuid" in line:
+ PlatformGuid = line.split("=")[1].strip()
+ GlobalVarList = []
+ for arch in ArchList:
+ global_var = os.path.join(BuildDir, "GlobalVar_%s_%s.bin" % (str(PlatformGuid),arch))
+ if not os.path.exists(global_var):
+ return None
+ GlobalVarList.append(global_var)
+ for global_var in GlobalVarList:
+ data_pipe = MemoryDataPipe()
+ data_pipe.load(global_var)
+ target = data_pipe.Get("P_Info").get("Target")
+ toolchain = data_pipe.Get("P_Info").get("ToolChain")
+ archlist = data_pipe.Get("P_Info").get("ArchList")
+ Arch = data_pipe.Get("P_Info").get("Arch")
+ active_p = data_pipe.Get("P_Info").get("ActivePlatform")
+ workspacedir = data_pipe.Get("P_Info").get("WorkspaceDir")
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(workspacedir, PackagesPath)
+ LibraryBuildDirectoryList = data_pipe.Get("LibraryBuildDirectoryList")
+ ModuleBuildDirectoryList = data_pipe.Get("ModuleBuildDirectoryList")
+
+ for m_build_dir in LibraryBuildDirectoryList:
+ if not os.path.exists(os.path.join(m_build_dir,self.MakeFileName)):
+ return None
+ for m_build_dir in ModuleBuildDirectoryList:
+ if not os.path.exists(os.path.join(m_build_dir,self.MakeFileName)):
+ return None
+ Wa = WorkSpaceInfo(
+ workspacedir,active_p,target,toolchain,archlist
+ )
+ Pa = PlatformInfo(Wa, active_p, target, toolchain, Arch,data_pipe)
+ Wa.AutoGenObjectList.append(Pa)
+ return Wa
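+
+ # The .AutoGenIdFile.txt read above is a plain key=value file written at
+ # the end of PerformAutoGen, e.g. (illustrative values):
+ #   Arch=IA32|X64
+ #   BuildDir=<WORKSPACE>/Build/SomePkg/DEBUG_GCC5
+ #   PlatformGuid=5a9e7754-d81b-49ea-85ad-69eaa7b1539b
+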
+ def SetupMakeSetting(self,Wa):
+ BuildModules = []
+ for Pa in Wa.AutoGenObjectList:
+ for m in Pa._MbList:
+ ma = ModuleAutoGen(Wa,m.MetaFile, Pa.BuildTarget, Wa.ToolChain, Pa.Arch, Pa.MetaFile,Pa.DataPipe)
+ BuildModules.append(ma)
+ fdf_file = Wa.FlashDefinition
+ if fdf_file:
+ Fdf = FdfParser(fdf_file.Path)
+ Fdf.ParseFile()
+ GlobalData.gFdfParser = Fdf
+ if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
+ FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
+ for FdRegion in FdDict.RegionList:
+ if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
+ if int(FdRegion.Offset) % 8 != 0:
+ EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
+ Wa.FdfProfile = Fdf.Profile
+ self.Fdf = Fdf
+ else:
+ self.Fdf = None
+ return BuildModules
+
+ ## Build a platform in multi-thread mode
+ #
+ def PerformAutoGen(self,BuildTarget,ToolChain):
+ WorkspaceAutoGenTime = time.time()
+ Wa = WorkspaceAutoGen(
+ self.WorkspaceDir,
+ self.PlatformFile,
+ BuildTarget,
+ ToolChain,
+ self.ArchList,
+ self.BuildDatabase,
+ self.TargetTxt,
+ self.ToolDef,
+ self.Fdf,
+ self.FdList,
+ self.FvList,
+ self.CapList,
+ self.SkuId,
+ self.UniFlag,
+ self.Progress
+ )
+ self.Fdf = Wa.FdfFile
+ self.LoadFixAddress = Wa.Platform.LoadFixAddress
+ self.BuildReport.AddPlatformReport(Wa)
+ Wa.CreateMakeFile(False)
+
+ # Add ffs build to makefile
+ CmdListDict = {}
+ if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
+ CmdListDict = self._GenFfsCmd(Wa.ArchList)
+
+ self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))
+ BuildModules = []
+ for Arch in Wa.ArchList:
+ PcdMaList = []
+ AutoGenStart = time.time()
+ GlobalData.gGlobalDefines['ARCH'] = Arch
+ Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
+ if Pa is None:
+ continue
+ ModuleList = []
+ for Inf in Pa.Platform.Modules:
+ ModuleList.append(Inf)
+ # Add the INF only list in FDF
+ if GlobalData.gFdfParser is not None:
+ for InfName in GlobalData.gFdfParser.Profile.InfList:
+ Inf = PathClass(NormPath(InfName), self.WorkspaceDir, Arch)
+ if Inf in Pa.Platform.Modules:
+ continue
+ ModuleList.append(Inf)
+ Pa.DataPipe.DataContainer = {"FfsCommand":CmdListDict}
+ Pa.DataPipe.DataContainer = {"Workspace_timestamp": Wa._SrcTimeStamp}
+ Pa.DataPipe.DataContainer = {"CommandTarget": self.Target}
+ Pa.CreateLibModuelDirs()
+ # Fetch the MakeFileName.
+ self.MakeFileName = Pa.MakeFileName
+ if not self.MakeFileName:
+ self.MakeFileName = Pa.MakeFile
+
+ Pa.DataPipe.DataContainer = {"LibraryBuildDirectoryList":Pa.LibraryBuildDirectoryList}
+ Pa.DataPipe.DataContainer = {"ModuleBuildDirectoryList":Pa.ModuleBuildDirectoryList}
+ Pa.DataPipe.DataContainer = {"FdsCommandDict": Wa.GenFdsCommandDict}
+ # Prepare the cache share data for multiprocessing
+ Pa.DataPipe.DataContainer = {"gPlatformHashFile":GlobalData.gPlatformHashFile}
+ ModuleCodaFile = {}
+ for ma in Pa.ModuleAutoGenList:
+ ModuleCodaFile[(ma.MetaFile.File,ma.MetaFile.Root,ma.Arch,ma.MetaFile.Path)] = [item.Target for item in ma.CodaTargetList]
+ Pa.DataPipe.DataContainer = {"ModuleCodaFile":ModuleCodaFile}
+ # ModuleList contains all driver modules only
+ for Module in ModuleList:
+ # Get ModuleAutoGen object to generate C code file and makefile
+ Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ if Ma is None:
+ continue
+ if Ma.PcdIsDriver:
+ Ma.PlatformInfo = Pa
+ Ma.Workspace = Wa
+ PcdMaList.append(Ma)
+ self.AllDrivers.add(Ma)
+ self.AllModules.add(Ma)
+
+ mqueue = mp.Queue()
+ cqueue = mp.Queue()
+ for m in Pa.GetAllModuleInfo:
+ mqueue.put(m)
+ module_file,module_root,module_path,module_basename,\
+ module_originalpath,module_arch,IsLib = m
+ Ma = ModuleAutoGen(Wa, PathClass(module_path, Wa), BuildTarget,\
+ ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ self.AllModules.add(Ma)
+ data_pipe_file = os.path.join(Pa.BuildDir, "GlobalVar_%s_%s.bin" % (str(Pa.Guid),Pa.Arch))
+ Pa.DataPipe.dump(data_pipe_file)
+
+ mqueue.put((None,None,None,None,None,None,None))
+ autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)
+
+ if not autogen_rt:
+ self.AutoGenMgr.TerminateWorkers()
+ self.AutoGenMgr.join(1)
+ raise FatalError(errorcode)
+
+ if GlobalData.gUseHashCache:
+ for item in GlobalData.gModuleAllCacheStatus:
+ (MetaFilePath, Arch, CacheStr, Status) = item
+ Ma = ModuleAutoGen(Wa, PathClass(MetaFilePath, Wa), BuildTarget,\
+ ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
+ if CacheStr == "PreMakeCache" and Status == False:
+ self.PreMakeCacheMiss.add(Ma)
+ if CacheStr == "PreMakeCache" and Status == True:
+ self.PreMakeCacheHit.add(Ma)
+ GlobalData.gModuleCacheHit.add(Ma)
+ if CacheStr == "MakeCache" and Status == False:
+ self.MakeCacheMiss.add(Ma)
+ if CacheStr == "MakeCache" and Status == True:
+ self.MakeCacheHit.add(Ma)
+ GlobalData.gModuleCacheHit.add(Ma)
+ self.AutoGenTime += int(round((time.time() - AutoGenStart)))
+ AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
+ with open(AutoGenIdFile,"w") as fw:
+ fw.write("Arch=%s\n" % "|".join((Wa.ArchList)))
+ fw.write("BuildDir=%s\n" % Wa.BuildDir)
+ fw.write("PlatformGuid=%s\n" % str(Wa.AutoGenObjectList[0].Guid))
+
+ if GlobalData.gBinCacheSource:
+ BuildModules.extend(self.MakeCacheMiss)
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheDest:
+ BuildModules.extend(self.PreMakeCacheMiss)
+ else:
+ BuildModules.extend(self.AllDrivers)
+
+ self.Progress.Stop("done!")
+ return Wa, BuildModules
+
+ def _MultiThreadBuildPlatform(self):
+ SaveFileOnChange(self.PlatformBuildPath, '# DO NOT EDIT \n# FILE auto-generated\n', False)
+ for BuildTarget in self.BuildTargetList:
+ GlobalData.gGlobalDefines['TARGET'] = BuildTarget
+ index = 0
+ for ToolChain in self.ToolChainList:
+ resetFdsGlobalVariable()
+ GlobalData.gGlobalDefines['TOOLCHAIN'] = ToolChain
+ GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = ToolChain
+ GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[index]
+ index += 1
+ ExitFlag = threading.Event()
+ ExitFlag.clear()
+ if self.SkipAutoGen:
+ Wa = self.VerifyAutoGenFiles()
+ if Wa is None:
+ self.SkipAutoGen = False
+ Wa, self.BuildModules = self.PerformAutoGen(BuildTarget,ToolChain)
+ else:
+ GlobalData.gAutoGenPhase = True
+ self.BuildModules = self.SetupMakeSetting(Wa)
+ else:
+ Wa, self.BuildModules = self.PerformAutoGen(BuildTarget,ToolChain)
+ Pa = Wa.AutoGenObjectList[0]
+ GlobalData.gAutoGenPhase = False
+
+ if GlobalData.gBinCacheSource:
+ EdkLogger.quiet("[cache Summary]: Total module num: %s" % len(self.AllModules))
+ EdkLogger.quiet("[cache Summary]: PreMakecache miss num: %s " % len(self.PreMakeCacheMiss))
+ EdkLogger.quiet("[cache Summary]: Makecache miss num: %s " % len(self.MakeCacheMiss))
+
+ for Arch in Wa.ArchList:
+ MakeStart = time.time()
+ for Ma in set(self.BuildModules):
+ # Generate build task for the module
+ if not Ma.IsBinaryModule:
+ Bt = BuildTask.New(ModuleMakeUnit(Ma, Pa.BuildCommand,self.Target))
+ # Break build if any build thread has error
+ if BuildTask.HasError():
+ # we need a full version of makefile for platform
+ ExitFlag.set()
+ BuildTask.WaitForComplete()
+ Pa.CreateMakeFile(False)
+ EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
+ # Start task scheduler
+ if not BuildTask.IsOnGoing():
+ BuildTask.StartScheduler(self.ThreadNumber, ExitFlag)
+
+ # In case there's an interruption, we need a full version of the makefile for the platform
+
+ if BuildTask.HasError():
+ EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
+ self.MakeTime += int(round((time.time() - MakeStart)))
+
+ MakeContinue = time.time()
+ #
+ # All modules have been put into the build task queue. Tell the task
+ # scheduler to exit when all tasks are completed.
+ #
+ ExitFlag.set()
+ BuildTask.WaitForComplete()
+ if GlobalData.gBinCacheDest:
+ self.GenDestCache()
+ elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
+ # Only for --hash
+ # Update PreMakeCacheChain files
+ self.GenLocalPreMakeCache()
+ #
+ # Get Module List
+ #
+ ModuleList = {ma.Guid.upper(): ma for ma in self.BuildModules}
+ self.BuildModules = []
+ self.MakeTime += int(round((time.time() - MakeContinue)))
+ #
+ # Check for build error, and raise exception if one
+ # has been signaled.
+ #
+ if BuildTask.HasError():
+ EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
+
+ # Create MAP file when Load Fix Address is enabled.
+ if self.Target in ["", "all", "fds"]:
+ for Arch in Wa.ArchList:
+ #
+ # Check whether the set fix address is above 4G for 32bit image.
+ #
+ if (Arch == 'IA32' or Arch == 'ARM') and self.LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self.LoadFixAddress >= 0x100000000:
+ EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS can't be set to larger than or equal to 4G for the platorm with IA32 or ARM arch modules")
+
+ #
+ # Rebase module to the preferred memory address before GenFds
+ #
+ MapBuffer = []
+ if self.LoadFixAddress != 0:
+ self._CollectModuleMapBuffer(MapBuffer, ModuleList)
+
+ if self.Fdf:
+ #
+ # Generate FD image if there's a FDF file found
+ #
+ GenFdsStart = time.time()
+ if GenFdsApi(Wa.GenFdsCommandDict, self.Db):
+ EdkLogger.error("build", COMMAND_FAILURE)
+ Threshold = self.GetFreeSizeThreshold()
+ if Threshold:
+ self.CheckFreeSizeThreshold(Threshold, Wa.FvDir)
+
+ #
+ # Create MAP file for all platform FVs after GenFds.
+ #
+ self._CollectFvMapBuffer(MapBuffer, Wa, ModuleList)
+ self.GenFdsTime += int(round((time.time() - GenFdsStart)))
+ #
+ # Save MAP buffer into MAP file.
+ #
+ self._SaveMapFile(MapBuffer, Wa)
+ self.CreateGuidedSectionToolsFile(Wa)
+
+ ## GetFreeSizeThreshold()
+ #
+ # @retval int Threshold value
+ #
+ def GetFreeSizeThreshold(self):
+ Threshold = None
+ Threshold_Str = GlobalData.gCommandLineDefines.get('FV_SPARE_SPACE_THRESHOLD')
+ if Threshold_Str:
+ try:
+ if Threshold_Str.lower().startswith('0x'):
+ Threshold = int(Threshold_Str, 16)
+ else:
+ Threshold = int(Threshold_Str)
+ except:
+ EdkLogger.warn("build", 'Incorrect value for FV_SPARE_SPACE_THRESHOLD %s. Only decimal or hex format is allowed.' % Threshold_Str)
+ return Threshold
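+
+ # Example (comment only): building with -D FV_SPARE_SPACE_THRESHOLD=0x10000
+ # yields Threshold = 65536, while a malformed value only logs a warning and
+ # leaves Threshold as None, so the free-space check is skipped.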
+
+ def CheckFreeSizeThreshold(self, Threshold=None, FvDir=None):
+ if not isinstance(Threshold, int):
+ return
+ if not isinstance(FvDir, str) or not FvDir:
+ return
+ FdfParserObject = GlobalData.gFdfParser
+ FvRegionNameList = [FvName for FvName in FdfParserObject.Profile.FvDict if FdfParserObject.Profile.FvDict[FvName].FvRegionInFD]
+ for FvName in FdfParserObject.Profile.FvDict:
+ if FvName in FvRegionNameList:
+ FvSpaceInfoFileName = os.path.join(FvDir, FvName.upper() + '.Fv.map')
+ if os.path.exists(FvSpaceInfoFileName):
+ FileLinesList = getlines(FvSpaceInfoFileName)
+ for Line in FileLinesList:
+ NameValue = Line.split('=')
+ if len(NameValue) == 2 and NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
+ FreeSizeValue = int(NameValue[1].strip(), 0)
+ if FreeSizeValue < Threshold:
+ EdkLogger.error("build", FV_FREESIZE_ERROR,
+ '%s FV free space %d is not enough to meet with the required spare space %d set by -D FV_SPARE_SPACE_THRESHOLD option.' % (
+ FvName, FreeSizeValue, Threshold))
+ break
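+
+ # Note (comment only): the Fv.map free-space line consumed above looks
+ # like "EFI_FV_SPACE_SIZE = 0x2E58" (illustrative value); int(value, 0)
+ # accepts both that hex form and plain decimal.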
+
+ ## Generate GuidedSectionTools.txt in the FV directories.
+ #
+ def CreateGuidedSectionToolsFile(self,Wa):
+ for BuildTarget in self.BuildTargetList:
+ for ToolChain in self.ToolChainList:
+ FvDir = Wa.FvDir
+ if not os.path.exists(FvDir):
+ continue
+ for Arch in self.ArchList:
+ guidList = []
+ tooldefguidList = []
+ guidAttribs = []
+ for Platform in Wa.AutoGenObjectList:
+ if Platform.BuildTarget != BuildTarget:
+ continue
+ if Platform.ToolChain != ToolChain:
+ continue
+ if Platform.Arch != Arch:
+ continue
+ if hasattr (Platform, 'BuildOption'):
+ for Tool in Platform.BuildOption:
+ if 'GUID' in Platform.BuildOption[Tool]:
+ if 'PATH' in Platform.BuildOption[Tool]:
+ value = Platform.BuildOption[Tool]['GUID']
+ if value in guidList:
+ EdkLogger.error("build", FORMAT_INVALID, "Duplicate GUID value %s used with Tool %s in DSC [BuildOptions]." % (value, Tool))
+ path = Platform.BuildOption[Tool]['PATH']
+ guidList.append(value)
+ guidAttribs.append((value, Tool, path))
+ for Tool in Platform.ToolDefinition:
+ if 'GUID' in Platform.ToolDefinition[Tool]:
+ if 'PATH' in Platform.ToolDefinition[Tool]:
+ value = Platform.ToolDefinition[Tool]['GUID']
+ if value in tooldefguidList:
+ EdkLogger.error("build", FORMAT_INVALID, "Duplicate GUID value %s used with Tool %s in tools_def.txt." % (value, Tool))
+ tooldefguidList.append(value)
+ if value in guidList:
+ # Already added by platform
+ continue
+ path = Platform.ToolDefinition[Tool]['PATH']
+ guidList.append(value)
+ guidAttribs.append((value, Tool, path))
+ # Sort by GuidTool name
+ guidAttribs = sorted (guidAttribs, key=lambda x: x[1])
+ # Write out GuidedSecTools.txt
+ toolsFile = os.path.join(FvDir, 'GuidedSectionTools.txt')
+ toolsFile = open(toolsFile, 'wt')
+ for guidedSectionTool in guidAttribs:
+ print(' '.join(guidedSectionTool), file=toolsFile)
+ toolsFile.close()
+
+ ## Returns the real path of the tool.
+ #
+ def GetRealPathOfTool (self, tool):
+ if os.path.exists(tool):
+ return os.path.realpath(tool)
+ return tool
+
+ ## Launch the module or platform build
+ #
+ def Launch(self):
+ self.AllDrivers = set()
+ self.AllModules = set()
+ self.PreMakeCacheMiss = set()
+ self.PreMakeCacheHit = set()
+ self.MakeCacheMiss = set()
+ self.MakeCacheHit = set()
+ if not self.ModuleFile:
+ if not self.SpawnMode or self.Target not in ["", "all"]:
+ self.SpawnMode = False
+ self._BuildPlatform()
+ else:
+ self._MultiThreadBuildPlatform()
+ else:
+ self.SpawnMode = False
+ self._BuildModule()
+
+ if self.Target == 'cleanall':
+ RemoveDirectory(os.path.dirname(GlobalData.gDatabasePath), True)
+
+ def CreateAsBuiltInf(self):
+ for Module in self.BuildModules:
+ Module.CreateAsBuiltInf()
+
+ def GenDestCache(self):
+ for Module in self.AllModules:
+ Module.GenPreMakefileHashList()
+ Module.GenMakefileHashList()
+ Module.CopyModuleToCache()
+
+ def GenLocalPreMakeCache(self):
+ for Module in self.PreMakeCacheMiss:
+ Module.GenPreMakefileHashList()
+
+ ## Do some clean-up works when error occurred
+ def Relinquish(self):
+ OldLogLevel = EdkLogger.GetLevel()
+ EdkLogger.SetLevel(EdkLogger.ERROR)
+ Utils.Progressor.Abort()
+ if self.SpawnMode == True:
+ BuildTask.Abort()
+ EdkLogger.SetLevel(OldLogLevel)
+
+def ParseDefines(DefineList=[]):
+ DefineDict = {}
+ if DefineList is not None:
+ for Define in DefineList:
+ DefineTokenList = Define.split("=", 1)
+ if not GlobalData.gMacroNamePattern.match(DefineTokenList[0]):
+ EdkLogger.error('build', FORMAT_INVALID,
+ "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
+ ExtraData=DefineTokenList[0])
+
+ if len(DefineTokenList) == 1:
+ DefineDict[DefineTokenList[0]] = "TRUE"
+ else:
+ DefineDict[DefineTokenList[0]] = DefineTokenList[1].strip()
+ return DefineDict
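+
+# Example (comment only): ParseDefines(["FOO", "BAR=1"]) returns
+# {'FOO': 'TRUE', 'BAR': '1'}; a lowercase name such as "foo" is rejected
+# because gMacroNamePattern requires the form [A-Z][A-Z0-9_]*.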
+
+
+
+def LogBuildTime(Time):
+ if Time:
+ TimeDurStr = ''
+ TimeDur = time.gmtime(Time)
+ if TimeDur.tm_yday > 1:
+ TimeDurStr = time.strftime("%H:%M:%S", TimeDur) + ", %d day(s)" % (TimeDur.tm_yday - 1)
+ else:
+ TimeDurStr = time.strftime("%H:%M:%S", TimeDur)
+ return TimeDurStr
+ else:
+ return None
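+
+# Examples (comment only): LogBuildTime(3661) returns '01:01:01',
+# LogBuildTime(90000) returns '01:00:00, 1 day(s)', and LogBuildTime(0)
+# returns None.
+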
+def ThreadNum():
+ OptionParser = MyOptionParser()
+ if not OptionParser.BuildOption and not OptionParser.BuildTarget:
+ OptionParser.GetOption()
+ BuildOption, BuildTarget = OptionParser.BuildOption, OptionParser.BuildTarget
+ ThreadNumber = BuildOption.ThreadNumber
+ GlobalData.gCmdConfDir = BuildOption.ConfDirectory
+ if ThreadNumber is None:
+ TargetObj = TargetTxtDict()
+ ThreadNumber = TargetObj.Target.TargetTxtDictionary[TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
+ if ThreadNumber == '':
+ ThreadNumber = 0
+ else:
+ ThreadNumber = int(ThreadNumber, 0)
+
+ if ThreadNumber == 0:
+ try:
+ ThreadNumber = multiprocessing.cpu_count()
+ except (ImportError, NotImplementedError):
+ ThreadNumber = 1
+ return ThreadNumber
+## Tool entrance method
+#
+# This method mainly dispatches to specific methods per the command line options.
+# If no error is found, it returns zero so the caller of this tool can tell
+# whether it executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+LogQMaxSize = ThreadNum() * 10
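+# For example, with 8 detected processor threads the queue created in Main()
+# is bounded at 80 records; mp.Queue blocks producers once full, which keeps
+# worker processes from outrunning the LogAgent that drains the queue.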
+def Main():
+ StartTime = time.time()
+
+ #
+ # Create a log Queue
+ #
+ LogQ = mp.Queue(LogQMaxSize)
+ # Initialize log system
+ EdkLogger.LogClientInitialize(LogQ)
+ GlobalData.gCommand = sys.argv[1:]
+ #
+ # Parse the options and args
+ #
+ OptionParser = MyOptionParser()
+ if not OptionParser.BuildOption and not OptionParser.BuildTarget:
+ OptionParser.GetOption()
+ Option, Target = OptionParser.BuildOption, OptionParser.BuildTarget
+ GlobalData.gOptions = Option
+ GlobalData.gCaseInsensitive = Option.CaseInsensitive
+
+ # Set log level
+ LogLevel = EdkLogger.INFO
+ if Option.verbose is not None:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ LogLevel = EdkLogger.VERBOSE
+ elif Option.quiet is not None:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ LogLevel = EdkLogger.QUIET
+ elif Option.debug is not None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ LogLevel = Option.debug + 1
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ if Option.WarningAsError == True:
+ EdkLogger.SetWarningAsError()
+ Log_Agent = LogAgent(LogQ,LogLevel,Option.LogFile)
+ Log_Agent.start()
+
+ if platform.platform().find("Windows") >= 0:
+ GlobalData.gIsWindows = True
+ else:
+ GlobalData.gIsWindows = False
+
+ EdkLogger.quiet("Build environment: %s" % platform.platform())
+ EdkLogger.quiet(time.strftime("Build start time: %H:%M:%S, %b.%d %Y\n", time.localtime()))
+ ReturnCode = 0
+ MyBuild = None
+ BuildError = True
+ try:
+ if len(Target) == 0:
+ Target = "all"
+ elif len(Target) >= 2:
+ EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.",
+ ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
+ else:
+ Target = Target[0].lower()
+
+ if Target not in gSupportedTarget:
+ EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target,
+ ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
+
+ #
+ # Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH
+ #
+ CheckEnvVariable()
+ GlobalData.gCommandLineDefines.update(ParseDefines(Option.Macros))
+
+ Workspace = os.getenv("WORKSPACE")
+ #
+ # Get the files' real names in the workspace dir
+ #
+ GlobalData.gAllFiles = Utils.DirCache(Workspace)
+
+ WorkingDirectory = os.getcwd()
+ if not Option.ModuleFile:
+ FileList = glob.glob(os.path.normpath(os.path.join(WorkingDirectory, '*.inf')))
+ FileNum = len(FileList)
+ if FileNum >= 2:
+ EdkLogger.error("build", OPTION_NOT_SUPPORTED, "There are %d INF files in %s." % (FileNum, WorkingDirectory),
+ ExtraData="Please use '-m <INF_FILE_PATH>' switch to choose one.")
+ elif FileNum == 1:
+ Option.ModuleFile = NormFile(FileList[0], Workspace)
+
+ if Option.ModuleFile:
+ if os.path.isabs (Option.ModuleFile):
+ if os.path.normcase (os.path.normpath(Option.ModuleFile)).find (Workspace) == 0:
+ Option.ModuleFile = NormFile(os.path.normpath(Option.ModuleFile), Workspace)
+ Option.ModuleFile = PathClass(Option.ModuleFile, Workspace)
+ ErrorCode, ErrorInfo = Option.ModuleFile.Validate(".inf", False)
+ if ErrorCode != 0:
+ EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
+
+ if Option.PlatformFile is not None:
+ if os.path.isabs (Option.PlatformFile):
+ if os.path.normcase (os.path.normpath(Option.PlatformFile)).find (Workspace) == 0:
+ Option.PlatformFile = NormFile(os.path.normpath(Option.PlatformFile), Workspace)
+ Option.PlatformFile = PathClass(Option.PlatformFile, Workspace)
+
+ if Option.FdfFile is not None:
+ if os.path.isabs (Option.FdfFile):
+ if os.path.normcase (os.path.normpath(Option.FdfFile)).find (Workspace) == 0:
+ Option.FdfFile = NormFile(os.path.normpath(Option.FdfFile), Workspace)
+ Option.FdfFile = PathClass(Option.FdfFile, Workspace)
+ ErrorCode, ErrorInfo = Option.FdfFile.Validate(".fdf", False)
+ if ErrorCode != 0:
+ EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
+
+ if Option.Flag is not None and Option.Flag not in ['-c', '-s']:
+ EdkLogger.error("build", OPTION_VALUE_INVALID, "UNI flag must be one of -c or -s")
+
+ MyBuild = Build(Target, Workspace, Option,LogQ)
+ GlobalData.gCommandLineDefines['ARCH'] = ' '.join(MyBuild.ArchList)
+ if not (MyBuild.LaunchPrebuildFlag and os.path.exists(MyBuild.PlatformBuildPath)):
+ MyBuild.Launch()
+
+ #
+ # All job done, no error found and no exception raised
+ #
+ BuildError = False
+ except FatalError as X:
+ if MyBuild is not None:
+ # so that the multi-threaded build exits safely
+ MyBuild.Relinquish()
+ if Option is not None and Option.debug is not None:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = X.args[0]
+ except Warning as X:
+ # error from Fdf parser
+ if MyBuild is not None:
+ # so that the multi-threaded build exits safely
+ MyBuild.Relinquish()
+ if Option is not None and Option.debug is not None:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ else:
+ EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
+ ReturnCode = FORMAT_INVALID
+ except KeyboardInterrupt:
+ if MyBuild is not None:
+ # so that the multi-threaded build exits safely
+ MyBuild.Relinquish()
+ ReturnCode = ABORT_ERROR
+ if Option is not None and Option.debug is not None:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ except:
+ if MyBuild is not None:
+ # so that the multi-threaded build exits safely
+ MyBuild.Relinquish()
+
+ # try to get the meta-file from the object causing exception
+ Tb = sys.exc_info()[-1]
+ MetaFile = GlobalData.gProcessingFile
+ while Tb is not None:
+ if 'self' in Tb.tb_frame.f_locals and hasattr(Tb.tb_frame.f_locals['self'], 'MetaFile'):
+ MetaFile = Tb.tb_frame.f_locals['self'].MetaFile
+ Tb = Tb.tb_next
+ EdkLogger.error(
+ "\nbuild",
+ CODE_ERROR,
+ "Unknown fatal error when processing [%s]" % MetaFile,
+ ExtraData="\n(Please send email to %s for help, attaching following call stack trace!)\n" % MSG_EDKII_MAIL_ADDR,
+ RaiseError=False
+ )
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = CODE_ERROR
+ finally:
+ Utils.Progressor.Abort()
+ Utils.ClearDuplicatedInf()
+
+ if ReturnCode == 0:
+ try:
+ MyBuild.LaunchPostbuild()
+ Conclusion = "Done"
+ except:
+ Conclusion = "Failed"
+ ReturnCode = POSTBUILD_ERROR
+ elif ReturnCode == ABORT_ERROR:
+ Conclusion = "Aborted"
+ else:
+ Conclusion = "Failed"
+ FinishTime = time.time()
+ BuildDuration = time.gmtime(int(round(FinishTime - StartTime)))
+ BuildDurationStr = ""
+ if BuildDuration.tm_yday > 1:
+ BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
+ else:
+ BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
+ if MyBuild is not None:
+ if not BuildError:
+ MyBuild.BuildReport.GenerateReport(BuildDurationStr, LogBuildTime(MyBuild.AutoGenTime), LogBuildTime(MyBuild.MakeTime), LogBuildTime(MyBuild.GenFdsTime))
+
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ EdkLogger.quiet("\n- %s -" % Conclusion)
+ EdkLogger.quiet(time.strftime("Build end time: %H:%M:%S, %b.%d %Y", time.localtime()))
+ EdkLogger.quiet("Build total time: %s\n" % BuildDurationStr)
+ Log_Agent.kill()
+ Log_Agent.join()
+ return ReturnCode
+
+if __name__ == '__main__':
+ try:
+ mp.set_start_method('spawn')
+ except:
+ pass
+ r = Main()
+ ## 0-127 is a safe return range, and 1 is a standard default error
+ if r < 0 or r > 127: r = 1
+ sys.exit(r)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/buildoptions.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/buildoptions.py
new file mode 100755
index 00000000..729584c7
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/build/buildoptions.py
@@ -0,0 +1,105 @@
+## @file
+# build a platform or a module
+#
+# Copyright (c) 2014, Hewlett-Packard Development Company, L.P.<BR>
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2018 - 2020, Hewlett Packard Enterprise Development, L.P.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+# Version and Copyright
+from Common.BuildVersion import gBUILD_VERSION
+from optparse import OptionParser
+VersionNumber = "0.60" + ' ' + gBUILD_VERSION
+__version__ = "%prog Version " + VersionNumber
+__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
+
+gParamCheck = []
+def SingleCheckCallback(option, opt_str, value, parser):
+ if option not in gParamCheck:
+ setattr(parser.values, option.dest, value)
+ gParamCheck.append(option)
+ else:
+ parser.error("Option %s only allows one instance in command line!" % option)
+
+
+class MyOptionParser():
+
+ def __new__(cls, *args, **kw):
+ if not hasattr(cls, '_instance'):
+ orig = super(MyOptionParser, cls)
+ cls._instance = orig.__new__(cls, *args, **kw)
+ return cls._instance
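+
+ # Note (comment only): MyOptionParser is a singleton, so
+ # MyOptionParser() is MyOptionParser() evaluates to True and options
+ # parsed once by GetOption() are shared by every later construction.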
+
+ def __init__(self):
+ if not hasattr(self, 'BuildOption'):
+ self.BuildOption = None
+ if not hasattr(self, 'BuildTarget'):
+ self.BuildTarget = None
+
+ def GetOption(self):
+ Parser = OptionParser(description=__copyright__, version=__version__, prog="build.exe", usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")
+ Parser.add_option("-a", "--arch", action="append", dest="TargetArch",
+ help="ARCHS is one of list: IA32, X64, ARM, AARCH64, RISCV64 or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.")
+ Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback,
+ help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.")
+ Parser.add_option("-m", "--module", action="callback", type="string", dest="ModuleFile", callback=SingleCheckCallback,
+ help="Build the module specified by the INF file name argument.")
+ Parser.add_option("-b", "--buildtarget", type="string", dest="BuildTarget", help="Using the TARGET to build the platform, overriding target.txt's TARGET definition.",
+ action="append")
+ Parser.add_option("-t", "--tagname", action="append", type="string", dest="ToolChain",
+ help="Using the Tool Chain Tagname to build the platform, overriding target.txt's TOOL_CHAIN_TAG definition.")
+ Parser.add_option("-x", "--sku-id", action="callback", type="string", dest="SkuId", callback=SingleCheckCallback,
+ help="Using this name of SKU ID to build the platform, overriding SKUID_IDENTIFIER in DSC file.")
+
+ Parser.add_option("-n", action="callback", type="int", dest="ThreadNumber", callback=SingleCheckCallback,
+ help="Build the platform using multi-threaded compiler. The value overrides target.txt's MAX_CONCURRENT_THREAD_NUMBER. When value is set to 0, tool automatically detect number of "\
+ "processor threads, set value to 1 means disable multi-thread build, and set value to more than 1 means user specify the threads number to build.")
+
+ Parser.add_option("-f", "--fdf", action="callback", type="string", dest="FdfFile", callback=SingleCheckCallback,
+ help="The name of the FDF file to use, which overrides the setting in the DSC file.")
+ Parser.add_option("-r", "--rom-image", action="append", type="string", dest="RomImage", default=[],
+ help="The name of FD to be generated. The name must be from [FD] section in FDF file.")
+ Parser.add_option("-i", "--fv-image", action="append", type="string", dest="FvImage", default=[],
+ help="The name of FV to be generated. The name must be from [FV] section in FDF file.")
+ Parser.add_option("-C", "--capsule-image", action="append", type="string", dest="CapName", default=[],
+ help="The name of Capsule to be generated. The name must be from [Capsule] section in FDF file.")
+ Parser.add_option("-u", "--skip-autogen", action="store_true", dest="SkipAutoGen", help="Skip AutoGen step.")
+ Parser.add_option("-e", "--re-parse", action="store_true", dest="Reparse", help="Re-parse all meta-data files.")
+
+ Parser.add_option("-c", "--case-insensitive", action="store_true", dest="CaseInsensitive", default=False, help="Don't check case of file name.")
+
+ Parser.add_option("-w", "--warning-as-error", action="store_true", dest="WarningAsError", help="Treat warning in tools as error.")
+ Parser.add_option("-j", "--log", action="store", dest="LogFile", help="Put log in specified file as well as on console.")
+
+ Parser.add_option("-s", "--silent", action="store_true", type=None, dest="SilentMode",
+ help="Make use of silent mode of (n)make.")
+ Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
+ Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
+ "including library instances selected, final dependency expression, "\
+ "and warning messages, etc.")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
+
+ Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.")
+ Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD', 'LIBRARY', 'FLASH', 'DEPEX', 'BUILD_FLAGS', 'FIXED_ADDRESS', 'HASH', 'EXECUTION_ORDER'], dest="ReportType", default=[],
+ help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, HASH, EXECUTION_ORDER]. "\
+ "To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, HASH, BUILD_FLAGS, FIXED_ADDRESS]")
+ Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",
+ help="Specify the specific option to parse EDK UNI file. Must be one of: [-c, -s]. -c is for EDK framework UNI file, and -s is for EDK UEFI UNI file. "\
+ "This option can also be specified by setting *_*_*_BUILD_FLAGS in [BuildOptions] section of platform DSC. If they are both specified, this value "\
+ "will override the setting in [BuildOptions] section of platform DSC.")
+ Parser.add_option("-N", "--no-cache", action="store_true", dest="DisableCache", default=False, help="Disable build cache mechanism")
+ Parser.add_option("--conf", action="store", type="string", dest="ConfDirectory", help="Specify the customized Conf directory.")
+ Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.")
+ Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
+ Parser.add_option("--pcd", action="append", dest="OptionPcd", help="Set PCD value by command line. Format: \"PcdName=Value\" ")
+ Parser.add_option("-l", "--cmd-len", action="store", type="int", dest="CommandLength", help="Specify the maximum line length of build command. Default is 4096.")
+ Parser.add_option("--hash", action="store_true", dest="UseHashCache", default=False, help="Enable hash-based caching during build process.")
+ Parser.add_option("--binary-destination", action="store", type="string", dest="BinCacheDest", help="Generate a cache of binary files in the specified directory.")
+ Parser.add_option("--binary-source", action="store", type="string", dest="BinCacheSource", help="Consume a cache of binary files from the specified directory.")
+ Parser.add_option("--genfds-multi-thread", action="store_true", dest="GenfdsMultiThread", default=True, help="Enable GenFds multi thread to generate ffs file.")
+ Parser.add_option("--no-genfds-multi-thread", action="store_true", dest="NoGenfdsMultiThread", default=False, help="Disable GenFds multi thread to generate ffs file.")
+ Parser.add_option("--disable-include-path-check", action="store_true", dest="DisableIncludePathCheck", default=False, help="Disable the include path check for outside of package.")
+ self.BuildOption, self.BuildTarget = Parser.parse_args()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/sitecustomize.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/sitecustomize.py
new file mode 100644
index 00000000..7e82f53c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/sitecustomize.py
@@ -0,0 +1,15 @@
+## @file
+#
+#
+# Copyright (c) 2009 - 2014, Apple Inc. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+import sys
+import locale
+
+if sys.platform == "darwin" and sys.version_info[0] < 3:
+ DefaultLocal = locale.getdefaultlocale()[1]
+ if DefaultLocal is None:
+ DefaultLocal = 'UTF8'
+ sys.setdefaultencoding(DefaultLocal)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/tests/Split/test_split.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/tests/Split/test_split.py
new file mode 100644
index 00000000..4e19e636
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/tests/Split/test_split.py
@@ -0,0 +1,115 @@
+# @file
+# Split a file into two pieces at the requested offset.
+#
+# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+# Import Modules
+import unittest
+import tempfile
+import os
+import shutil
+import Split.Split as sp
+import struct as st
+
+
+class TestSplit(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp()
+ self.binary_file = os.path.join(self.tmpdir, "Binary.bin")
+ self.create_inputfile()
+
+ def tearDown(self):
+ if os.path.exists(self.tmpdir):
+ shutil.rmtree(self.tmpdir)
+
+ def test_splitFile_position(self):
+ position = [-1, 0, 256, 512, 700, 1024, 2048]
+ result = [(0, 1024), (0, 1024), (256, 768),
+ (512, 512), (700, 324), (1024, 0), (1024, 0)]
+ outputfolder = self.tmpdir
+ for index, po in enumerate(position):
+ try:
+ sp.splitFile(self.binary_file, po)
+ except Exception:
+ self.fail("splitFile function error")
+
+ output1 = os.path.join(outputfolder, "Binary.bin1")
+ output2 = os.path.join(outputfolder, "Binary.bin2")
+ with open(output1, "rb") as f1:
+ size1 = len(f1.read())
+ with open(output2, "rb") as f2:
+ size2 = len(f2.read())
+
+ ex_result = result[index]
+ self.assertEqual(size1, ex_result[0])
+ self.assertEqual(size2, ex_result[1])
+
+ def create_inputfile(self):
+ with open(self.binary_file, "wb") as fout:
+ for i in range(512):
+ fout.write(st.pack("<H", i))
+
+ def test_splitFile_outputfile(self):
+ output = [
+ None,
+ "Binary.bin",
+ "Binary1.bin",
+ r"output/Binary1.bin",
+ os.path.abspath( r"output/Binary1.bin")
+ ]
+ expected_output = [
+ os.path.join(os.path.dirname(self.binary_file),"Binary.bin1" ),
+ os.path.join(os.getcwd(),"Binary.bin"),
+ os.path.join(os.getcwd(),"Binary1.bin"),
+ os.path.join(os.getcwd(),r"output/Binary1.bin"),
+ os.path.join(os.path.abspath( r"output/Binary1.bin"))
+ ]
+ for index, o in enumerate(output):
+ try:
+ sp.splitFile(self.binary_file, 123, outputfile1=o)
+ except Exception:
+ self.fail("splitFile function error")
+
+ self.assertTrue(os.path.exists(expected_output[index]))
+ self.create_inputfile()
+
+ def test_splitFile_outputfolder(self):
+ outputfolder = [
+ None,
+ "output",
+ r"output1/output2",
+ os.path.abspath("output"),
+ "output"
+ ]
+ output = [
+ None,
+ None,
+ "Binary1.bin",
+ r"output/Binary1.bin",
+ os.path.abspath( r"output_1/Binary1.bin")
+ ]
+
+ expected_output = [
+ os.path.join(os.path.dirname(self.binary_file),"Binary.bin1" ),
+ os.path.join(os.getcwd(),"output", "Binary.bin1"),
+ os.path.join(os.getcwd(), r"output1/output2" , "Binary1.bin"),
+ os.path.join(os.getcwd(),r"output", "output/Binary1.bin"),
+ os.path.join(os.path.abspath( r"output/Binary1.bin"))
+ ]
+
+ for index, o in enumerate(outputfolder):
+ try:
+ sp.splitFile(self.binary_file, 123, outputdir=o,outputfile1=output[index])
+ except Exception:
+ self.fail("splitFile function error")
+
+ self.assertTrue(os.path.exists(expected_output[index]))
+ self.create_inputfile()
+
+
+if __name__ == '__main__':
+ unittest.main()